#ifdef USE_PRAGMA_IDENT_SRC
#pragma ident "@(#)dump.cpp     1.33 07/05/23 10:53:38 JVM"
#endif
/*
 * Copyright 2003-2007 Sun Microsystems, Inc.  All Rights Reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
 * CA 95054 USA or visit www.sun.com if you need additional information or
 * have any questions.
 *
 */

# include "incls/_precompiled.incl"
# include "incls/_dump.cpp.incl"


// Closure to set up the fingerprint field for all methods.

class FingerprintMethodsClosure: public ObjectClosure {
public:
  void do_object(oop obj) {
    if (obj->is_method()) {
      methodOop mobj = (methodOop)obj;
      ResourceMark rm;
      (new Fingerprinter(mobj))->fingerprint();
    }
  }
};



// Closure to set the hash value (String.hash field) in all of the
// String objects in the heap.  Setting the hash value is not required.
// However, setting the value in advance prevents the value from being
// written later, increasing the likelihood that the shared page containing
// the hash can be shared.
//
// NOTE THAT the algorithm in StringTable::hash_string() MUST MATCH the
// algorithm in java.lang.String.hashCode().
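//
// For reference, java.lang.String.hashCode() computes the polynomial
//
//   h = s[0]*31^(n-1) + s[1]*31^(n-2) + ... + s[n-1]
//
// over the value[] characters, which StringTable::hash_string() is
// expected to reproduce here.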

class StringHashCodeClosure: public OopClosure {
private:
  Thread* THREAD;
  int hash_offset;
public:
  StringHashCodeClosure(Thread* t) {
    THREAD = t;
    hash_offset = java_lang_String::hash_offset_in_bytes();
  }

  void do_oop(oop* pobj) {
    if (pobj != NULL) {
      oop obj = *pobj;
      if (obj->klass() == SystemDictionary::string_klass()) {

        int hash;
        typeArrayOop value = java_lang_String::value(obj);
        int length = java_lang_String::length(obj);
        if (length == 0) {
          hash = 0;
        } else {
          int offset = java_lang_String::offset(obj);
          jchar* s = value->char_at_addr(offset);
          hash = StringTable::hash_string(s, length);
        }
        obj->int_field_put(hash_offset, hash);
      }
    }
  }
};


// Remove data from objects which should not appear in the shared file
// (as it pertains only to the current JVM).

class RemoveUnshareableInfoClosure : public ObjectClosure {
public:
  void do_object(oop obj) {
    // Zap data from the objects which pertains only to this JVM.  We
    // want that data recreated in new JVMs when the shared file is used.
    if (obj->is_method()) {
      ((methodOop)obj)->remove_unshareable_info();
    }
    else if (obj->is_klass()) {
      Klass::cast((klassOop)obj)->remove_unshareable_info();
    }

    // Don't save compiler related special oops (shouldn't be any yet).
    if (obj->is_methodData() || obj->is_compiledICHolder()) {
      ShouldNotReachHere();
    }
  }
};

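// Mark an object for promotion into the shared spaces by installing the
// "marked" variant of the prototype mark word.  Returns true only if this
// call did the marking, i.e. the object was not already shared, forwarded
// or marked.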
static bool mark_object(oop obj) {
  if (obj != NULL &&
      !obj->is_shared() &&
      !obj->is_forwarded() &&
      !obj->is_gc_marked()) {
    obj->set_mark(markOopDesc::prototype()->set_marked());
    return true;
  }

  return false;
}

// Closure:  mark objects.

class MarkObjectsOopClosure : public OopClosure {
public:
  void do_oop(oop* pobj) {
    mark_object(*pobj);
  }
};


class MarkObjectsSkippingKlassesOopClosure : public OopClosure {
public:
  void do_oop(oop* pobj) {
    oop obj = *pobj;
    if (obj != NULL &&
        !obj->is_klass()) {
      mark_object(obj);
    }
  }
};


static void mark_object_recursive_skipping_klasses(oop obj) {
  mark_object(obj);
  if (obj != NULL) {
    MarkObjectsSkippingKlassesOopClosure mark_all;
    obj->oop_iterate(&mark_all);
  }
}


// Closure:  mark common read-only objects, excluding symbols

class MarkCommonReadOnly : public ObjectClosure {
private:
  MarkObjectsOopClosure mark_all;
public:
  void do_object(oop obj) {

    // Mark all constMethod objects.

    if (obj->is_constMethod()) {
      mark_object(obj);
      mark_object(constMethodOop(obj)->stackmap_data());
      // Exception tables are needed by ci code during compilation.
      mark_object(constMethodOop(obj)->exception_table());
    }

    // Mark objects referenced by klass objects which are read-only.

    else if (obj->is_klass()) {
      Klass* k = Klass::cast((klassOop)obj);
      mark_object(k->secondary_supers());

      // The methods() objArrays CANNOT BE MADE READ-ONLY, even though
      // they are never modified. Otherwise they would be pre-marked; the
      // GC marking phase would skip them; and by skipping them it would
      // fail to mark the method objects referenced by the arrays.

      if (obj->blueprint()->oop_is_instanceKlass()) {
        instanceKlass* ik = instanceKlass::cast((klassOop)obj);
        mark_object(ik->method_ordering());
        mark_object(ik->local_interfaces());
        mark_object(ik->transitive_interfaces());
        mark_object(ik->fields());

        mark_object(ik->class_annotations());

        mark_object_recursive_skipping_klasses(ik->fields_annotations());
        mark_object_recursive_skipping_klasses(ik->methods_annotations());
        mark_object_recursive_skipping_klasses(ik->methods_parameter_annotations());
        mark_object_recursive_skipping_klasses(ik->methods_default_annotations());

        typeArrayOop inner_classes = ik->inner_classes();
        if (inner_classes != NULL) {
          mark_object(inner_classes);
        }
      }
    }
  }
};


// Closure:  mark common symbols

class MarkCommonSymbols : public ObjectClosure {
private:
  MarkObjectsOopClosure mark_all;
public:
  void do_object(oop obj) {

    // Mark symbols referred to by method objects.

    if (obj->is_method()) {
      methodOop m = methodOop(obj);
      mark_object(m->name());
      mark_object(m->signature());
    }

    // Mark symbols referenced by klass objects which are read-only.

    else if (obj->is_klass()) {

      if (obj->blueprint()->oop_is_instanceKlass()) {
        instanceKlass* ik = instanceKlass::cast((klassOop)obj);
        mark_object(ik->name());
        mark_object(ik->generic_signature());
        mark_object(ik->source_file_name());
        mark_object(ik->source_debug_extension());

        typeArrayOop inner_classes = ik->inner_classes();
        if (inner_classes != NULL) {
          int length = inner_classes->length();
          for (int i = 0;
                   i < length;
                   i += instanceKlass::inner_class_next_offset) {
            int ioff = i + instanceKlass::inner_class_inner_name_offset;
            int index = inner_classes->ushort_at(ioff);
            if (index != 0) {
              mark_object(ik->constants()->symbol_at(index));
            }
          }
        }
        ik->field_names_and_sigs_iterate(&mark_all);
      }
    }

    // Mark symbols referenced by other constantpool entries.

    if (obj->is_constantPool()) {
      constantPoolOop(obj)->shared_symbols_iterate(&mark_all);
    }
  }
};


// Closure:  mark char arrays used by strings

class MarkStringValues : public ObjectClosure {
private:
  MarkObjectsOopClosure mark_all;
public:
  void do_object(oop obj) {

    // Character arrays referenced by String objects are read-only.

    if (java_lang_String::is_instance(obj)) {
      mark_object(java_lang_String::value(obj));
    }
  }
};


#ifdef DEBUG
// Closure:  Check for objects left in the heap which have not been moved.

class CheckRemainingObjects : public ObjectClosure {
private:
  int count;

public:
  CheckRemainingObjects() {
    count = 0;
  }

  void do_object(oop obj) {
    if (!obj->is_shared() &&
        !obj->is_forwarded()) {
      ++count;
      if (Verbose) {
        tty->print("Unreferenced object: ");
        obj->print_on(tty);
      }
    }
  }

  void status() {
    tty->print_cr("%d objects no longer referenced, not shared.", count);
  }
};
#endif


// Closure:  Mark remaining objects read-write, except Strings.

class MarkReadWriteObjects : public ObjectClosure {
private:
  MarkObjectsOopClosure mark_objects;
public:
  void do_object(oop obj) {

      // The methods() objArrays CANNOT BE MADE READ-ONLY, even though
      // they are never modified. Otherwise they would be pre-marked; the
      // GC marking phase would skip them; and by skipping them it would
      // fail to mark the method objects referenced by the arrays.

    if (obj->is_klass()) {
      mark_object(obj);
      Klass* k = klassOop(obj)->klass_part();
      mark_object(k->java_mirror());
      if (obj->blueprint()->oop_is_instanceKlass()) {
        instanceKlass* ik = (instanceKlass*)k;
        mark_object(ik->methods());
        mark_object(ik->constants());
      }
      if (obj->blueprint()->oop_is_javaArray()) {
        arrayKlass* ak = (arrayKlass*)k;
        mark_object(ak->component_mirror());
      }
      return;
    }

    // Mark constantPool tags and the constantPoolCache.

    else if (obj->is_constantPool()) {
      constantPoolOop pool = constantPoolOop(obj);
      mark_object(pool->cache());
      pool->shared_tags_iterate(&mark_objects);
      return;
    }

    // Mark all method objects.

    if (obj->is_method()) {
      mark_object(obj);
    }
  }
};


// Closure:  Mark String objects read-write.

class MarkStringObjects : public ObjectClosure {
private:
  MarkObjectsOopClosure mark_objects;
public:
  void do_object(oop obj) {

    // Mark String objects referenced by constant pool entries.

    if (obj->is_constantPool()) {
      constantPoolOop pool = constantPoolOop(obj);
      pool->shared_strings_iterate(&mark_objects);
      return;
    }
  }
};

// Move objects matching the specified type (i.e. lock_bits) to the specified
// space.

class MoveMarkedObjects : public ObjectClosure {
private:
  OffsetTableContigSpace* _space;
  bool _read_only;

public:
  MoveMarkedObjects(OffsetTableContigSpace* space, bool read_only) {
    _space = space;
    _read_only = read_only;
  }

  void do_object(oop obj) {
    if (obj->is_shared()) {
      return;
    }
    if (obj->is_gc_marked() && obj->forwardee() == NULL) {
      int s = obj->size();
      oop sh_obj = (oop)_space->allocate(s);
      if (sh_obj == NULL) {
        if (_read_only) {
          warning("\nThe permanent generation read only space is not large "
                  "enough to \npreload requested classes.  Use "
                  "-XX:SharedReadOnlySize= to increase \nthe initial "
                  "size of the read only space.\n");
        } else {
          warning("\nThe permanent generation read write space is not large "
                  "enough to \npreload requested classes.  Use "
                  "-XX:SharedReadWriteSize= to increase \nthe initial "
                  "size of the read write space.\n");
        }
        exit(2);
      }
      if (PrintSharedSpaces && Verbose && WizardMode) {
        tty->print_cr("\nMoveMarkedObjects: " PTR_FORMAT " -> " PTR_FORMAT " %s", obj, sh_obj,
                      (_read_only ? "ro" : "rw"));
      }
      Copy::aligned_disjoint_words((HeapWord*)obj, (HeapWord*)sh_obj, s);
      obj->forward_to(sh_obj);
      if (_read_only) {
        // Readonly objects: set hash value to self pointer and make gc_marked.
        sh_obj->forward_to(sh_obj);
      } else {
        sh_obj->init_mark();
      }
    }
  }
};

static void mark_and_move(oop obj, MoveMarkedObjects* move) {
  if (mark_object(obj)) move->do_object(obj);
}

enum order_policy {
  OP_favor_startup = 0,
  OP_balanced = 1,
  OP_favor_runtime = 2
};

static void mark_and_move_for_policy(order_policy policy, oop obj, MoveMarkedObjects* move) {
  if (SharedOptimizeColdStartPolicy >= policy) mark_and_move(obj, move);
}
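
// Call sites in the closures below tag each promotion with the weakest
// policy that should still perform it during the pre-ordered passes, e.g.
//
//   mark_and_move_for_policy(OP_favor_startup, ik->name(), _move_ro);
//
// runs for any of the policy settings above, while OP_favor_runtime
// promotions happen in the ordered pass only at the highest setting.
// Objects skipped here are still promoted later by the generic mark/move
// passes, just not in access order.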

class MarkAndMoveOrderedReadOnly : public ObjectClosure {
private:
  MoveMarkedObjects *_move_ro;

public:
  MarkAndMoveOrderedReadOnly(MoveMarkedObjects *move_ro) : _move_ro(move_ro) {}

  void do_object(oop obj) {
    if (obj->is_klass() && obj->blueprint()->oop_is_instanceKlass()) {
      instanceKlass* ik = instanceKlass::cast((klassOop)obj);
      int i;

      mark_and_move_for_policy(OP_favor_startup, ik->name(), _move_ro);

      if (ik->super() != NULL) {
        do_object(ik->super());
      }

      objArrayOop interfaces = ik->local_interfaces();
      mark_and_move_for_policy(OP_favor_startup, interfaces, _move_ro);
      for(i = 0; i < interfaces->length(); i++) {
        klassOop k = klassOop(interfaces->obj_at(i));
        mark_and_move_for_policy(OP_favor_startup, k->klass_part()->name(), _move_ro);
        do_object(k);
      }

      objArrayOop methods = ik->methods();
      for(i = 0; i < methods->length(); i++) {
        methodOop m = methodOop(methods->obj_at(i));
        mark_and_move_for_policy(OP_favor_startup, m->constMethod(), _move_ro);
        mark_and_move_for_policy(OP_favor_runtime, m->constMethod()->exception_table(), _move_ro);
        mark_and_move_for_policy(OP_favor_runtime, m->constMethod()->stackmap_data(), _move_ro);

        // We don't move the name symbolOop here because it may invalidate
        // method ordering, which is dependent on the address of the name
        // symbolOop.  It will get promoted later with the other symbols.
        // Method name is rarely accessed during classloading anyway.
        // mark_and_move_for_policy(OP_balanced, m->name(), _move_ro);

        mark_and_move_for_policy(OP_favor_startup, m->signature(), _move_ro);
      }

      mark_and_move_for_policy(OP_favor_startup, ik->transitive_interfaces(), _move_ro);
      mark_and_move_for_policy(OP_favor_startup, ik->fields(), _move_ro);

      mark_and_move_for_policy(OP_favor_runtime, ik->secondary_supers(),  _move_ro);
      mark_and_move_for_policy(OP_favor_runtime, ik->method_ordering(),   _move_ro);
      mark_and_move_for_policy(OP_favor_runtime, ik->class_annotations(), _move_ro);
      mark_and_move_for_policy(OP_favor_runtime, ik->fields_annotations(), _move_ro);
      mark_and_move_for_policy(OP_favor_runtime, ik->methods_annotations(), _move_ro);
      mark_and_move_for_policy(OP_favor_runtime, ik->methods_parameter_annotations(), _move_ro);
      mark_and_move_for_policy(OP_favor_runtime, ik->methods_default_annotations(), _move_ro);
      mark_and_move_for_policy(OP_favor_runtime, ik->inner_classes(), _move_ro);
      mark_and_move_for_policy(OP_favor_runtime, ik->secondary_supers(), _move_ro);
    }
  }
};

class MarkAndMoveOrderedReadWrite: public ObjectClosure {
private:
  MoveMarkedObjects *_move_rw;

public:
  MarkAndMoveOrderedReadWrite(MoveMarkedObjects *move_rw) : _move_rw(move_rw) {}

  void do_object(oop obj) {
    if (obj->is_klass() && obj->blueprint()->oop_is_instanceKlass()) {
      instanceKlass* ik = instanceKlass::cast((klassOop)obj);
      int i;

      mark_and_move_for_policy(OP_favor_startup, ik->as_klassOop(), _move_rw);

      if (ik->super() != NULL) {
        do_object(ik->super());
      }

      objArrayOop interfaces = ik->local_interfaces();
      for(i = 0; i < interfaces->length(); i++) {
        klassOop k = klassOop(interfaces->obj_at(i));
        mark_and_move_for_policy(OP_favor_startup, k, _move_rw);
        do_object(k);
      }

      objArrayOop methods = ik->methods();
      mark_and_move_for_policy(OP_favor_startup, methods, _move_rw);
      for(i = 0; i < methods->length(); i++) {
        methodOop m = methodOop(methods->obj_at(i));
        mark_and_move_for_policy(OP_favor_startup, m, _move_rw);
        mark_and_move_for_policy(OP_favor_startup, ik->constants(), _move_rw);          // idempotent
        mark_and_move_for_policy(OP_balanced, ik->constants()->cache(), _move_rw); // idempotent
        mark_and_move_for_policy(OP_balanced, ik->constants()->tags(), _move_rw);  // idempotent
      }

      mark_and_move_for_policy(OP_favor_startup, ik->as_klassOop()->klass(), _move_rw);
      mark_and_move_for_policy(OP_favor_startup, ik->constants()->klass(), _move_rw);

      // Although Java mirrors are marked in MarkReadWriteObjects,
      // apparently they were never moved into shared spaces since
      // MoveMarkedObjects skips marked instance oops.  This may
      // be a bug in the original implementation or simply the vestige
      // of an abandoned experiment.  Nevertheless we leave a hint
      // here in case this capability is ever correctly implemented.
      //
      // mark_and_move_for_policy(OP_favor_runtime, ik->java_mirror(), _move_rw);
    }
  }

};

// Adjust references in oops to refer to shared spaces.

class ResolveForwardingClosure: public OopClosure {
public:
  void do_oop(oop* p) {
    oop obj = *p;
    if (!obj->is_shared()) {
      if (obj != NULL) {
        oop f = obj->forwardee();
        guarantee(f->is_shared(), "Oop doesn't refer to shared space.");
        *p = f;
      }
    }
  }
};

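// Re-sort the methods arrays of ik and of all its super classes by the new
// (post-promotion) symbolOop addresses, and re-initialize the itables whose
// indices depend on that ordering.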
void sort_methods(instanceKlass* ik, TRAPS) {
  klassOop super = ik->super();
  if (super != NULL) {
    sort_methods(instanceKlass::cast(super), THREAD);
  }

  // The methods array must be ordered by symbolOop address. (See
  // classFileParser.cpp where methods in a class are originally
  // sorted.)  Since objects have just been reordered, this must be
  // corrected.
  methodOopDesc::sort_methods(ik->methods(),
                              ik->methods_annotations(),
                              ik->methods_parameter_annotations(),
                              ik->methods_default_annotations(),
                              true /* idempotent, slow */);

  // Itable indices are calculated based on methods array order
  // (see klassItable::compute_itable_index()).  Must reinitialize.
  // We assume that since checkconstraints is false, this method
  // cannot throw an exception.  An exception here would be
  // problematic since this is the VMThread, not a JavaThread.
  ik->itable()->initialize_itable(false, THREAD);
}

// Sort methods if the oop is an instanceKlass.

class SortMethodsClosure: public ObjectClosure {
private:
  Thread* _thread;

public:
  SortMethodsClosure(Thread* thread) : _thread(thread) {}

  void do_object(oop obj) {
    // instanceKlass objects need some adjustment.
    if (obj->blueprint()->oop_is_instanceKlass()) {
      instanceKlass* ik = instanceKlass::cast((klassOop)obj);

      sort_methods(ik, _thread);
    }
  }
};


// Adjust references in oops to refer to shared spaces.

class PatchOopsClosure: public ObjectClosure {
private:
  Thread* _thread;
  ResolveForwardingClosure resolve;

public:
  PatchOopsClosure(Thread* thread) : _thread(thread) {}

  void do_object(oop obj) {
    obj->oop_iterate_header(&resolve);
    obj->oop_iterate(&resolve);

    assert(obj->klass()->is_shared(), "Klass not pointing into shared space.");

    // If the object is a Java object or class which might (in the
    // future) contain a reference to a young gen object, add it to the
    // list.

    if (obj->is_klass() || obj->is_instance()) {
      if (obj->is_klass() ||
          obj->is_a(SystemDictionary::class_klass()) ||
          obj->is_a(SystemDictionary::throwable_klass())) {
        // Do nothing
      }
      else if (obj->is_a(SystemDictionary::string_klass())) {
        // immutable objects.
      } else {
        // someone added an object we hadn't accounted for.
        ShouldNotReachHere();
      }
    }
  }
};


// Empty the young and old generations.

class ClearSpaceClosure : public SpaceClosure {
public:
  void do_space(Space* s) {
    s->clear();
  }
};


// Closure for serializing initialization data out to a data area to be
// written to the shared file.
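//
// Everything is emitted as a flat stream of oop-sized words: oops, ints,
// size_ts, pointers and tags are each widened to one word, and do_region()
// writes a size tag followed by the raw words of the region.  The reading
// side must replay the same sequence of calls to consume the stream.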

class WriteClosure : public SerializeOopClosure {
private:
  oop* top;
  char* end;

  void out_of_space() {
    warning("\nThe shared miscellaneous data space is not large "
            "enough to \npreload requested classes.  Use "
            "-XX:SharedMiscDataSize= to increase \nthe initial "
            "size of the miscellaneous data space.\n");
    exit(2);
  }


  inline void check_space() {
    if ((char*)top + sizeof(oop) > end) {
      out_of_space();
    }
  }


public:
  WriteClosure(char* md_top, char* md_end) {
    top = (oop*)md_top;
    end = md_end;
  }

  char* get_top() { return (char*)top; }

  void do_oop(oop* p) {
    check_space();
    oop obj = *p;
    assert(obj->is_oop_or_null(), "invalid oop");
    assert(obj == NULL || obj->is_shared(),
           "Oop in shared space not pointing into shared space.");
    *top = obj;
    ++top;
  }

  void do_int(int* p) {
    check_space();
    *top = (oop)(intptr_t)*p;
    ++top;
  }

  void do_size_t(size_t* p) {
    check_space();
    *top = (oop)(intptr_t)*p;
    ++top;
  }

  void do_ptr(void** p) {
    check_space();
    *top = (oop)*p;
    ++top;
  }

  void do_ptr(HeapWord** p) { do_ptr((void **) p); }

  void do_tag(int tag) {
    check_space();
    *top = (oop)(intptr_t)tag;
    ++top;
  }

  void do_region(u_char* start, size_t size) {
    if ((char*)top + size > end) {
      out_of_space();
    }
    assert((intptr_t)start % sizeof(oop) == 0, "bad alignment");
    assert(size % sizeof(oop) == 0, "bad size");
    do_tag((int)size);
    while (size > 0) {
      *top = *(oop*)start;
      ++top;
      start += sizeof(oop);
      size -= sizeof(oop);
    }
  }

  bool reading() const { return false; }
};


class ResolveConstantPoolsClosure : public ObjectClosure {
private:
  TRAPS;
public:
  ResolveConstantPoolsClosure(Thread *t) {
    __the_thread__ = t;
  }
  void do_object(oop obj) {
    if (obj->is_constantPool()) {
      constantPoolOop cpool = (constantPoolOop)obj;
      int unresolved = cpool->pre_resolve_shared_klasses(THREAD);
    }
  }
};


// Print a summary of the contents of the read/write spaces to help
// identify objects which could be made read-only.  At this
// point, the objects have been written, and we can trash them as
// needed.

static void print_contents() {
  if (PrintSharedSpaces) {
    GenCollectedHeap* gch = GenCollectedHeap::heap();
    CompactingPermGenGen* gen = (CompactingPermGenGen*)gch->perm_gen();

    // High level summary of the read-only space:

    ClassifyObjectClosure coc;
    tty->cr(); tty->print_cr("ReadOnly space:");
    gen->ro_space()->object_iterate(&coc);
    coc.print();

    // High level summary of the read-write space:

    coc.reset();
    tty->cr(); tty->print_cr("ReadWrite space:");
    gen->rw_space()->object_iterate(&coc);
    coc.print();

    // Reset counters

    ClearAllocCountClosure cacc;
    gen->ro_space()->object_iterate(&cacc);
    gen->rw_space()->object_iterate(&cacc);
    coc.reset();

    // Lower level summary of the read-only space:

    gen->ro_space()->object_iterate(&coc);
    tty->cr(); tty->print_cr("ReadOnly space:");
    ClassifyInstanceKlassClosure cikc;
    gen->rw_space()->object_iterate(&cikc);
    cikc.print();

    // Reset counters

    gen->ro_space()->object_iterate(&cacc);
    gen->rw_space()->object_iterate(&cacc);
    coc.reset();

    // Lower level summary of the read-write space:

    gen->rw_space()->object_iterate(&coc);
    cikc.reset();
    tty->cr();  tty->print_cr("ReadWrite space:");
    gen->rw_space()->object_iterate(&cikc);
    cikc.print();
  }
}


// Patch C++ vtable pointer in klass oops.

// Klass objects contain references to c++ vtables in the JVM library.
// Fix them to point to our constructed vtables.  However, don't iterate
// across the space while doing this, as that causes the vtables to be
// patched, undoing our useful work.  Instead, iterate to make a list,
// then use the list to do the fixing.

class PatchKlassVtables: public ObjectClosure {
private:
  void*         _vtbl_ptr;
  VirtualSpace* _md_vs;
  GrowableArray<klassOop>* _klass_objects;

public:

  PatchKlassVtables(void* vtbl_ptr, VirtualSpace* md_vs) {
    _vtbl_ptr = vtbl_ptr;
    _md_vs = md_vs;
    _klass_objects = new GrowableArray<klassOop>();
  }


  void do_object(oop obj) {
    if (obj->is_klass()) {
      _klass_objects->append(klassOop(obj));
    }
  }

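  // For each collected klassOop, look up its current (in-process) vtable
  // pointer in vtbl_list and redirect it to the corresponding block of
  // generated entries at _vtbl_ptr, so the archived klass no longer points
  // at a vtable inside this particular JVM image.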
  void patch(void** vtbl_list, int vtbl_list_size) {
    for (int i = 0; i < _klass_objects->length(); ++i) {
      klassOop obj = (klassOop)_klass_objects->at(i);
      Klass* k = obj->klass_part();
      void* v =  *(void**)k;

      int n;
      for (n = 0; n < vtbl_list_size; ++n) {
        *(void**)k = NULL;
        if (vtbl_list[n] == v) {
          *(void**)k = (void**)_vtbl_ptr +
                                 (n * CompactingPermGenGen::num_virtuals);
          break;
        }
      }
      guarantee(n < vtbl_list_size, "unable to find matching vtbl pointer");
    }
  }
};


// Populate the shared space.

class VM_PopulateDumpSharedSpace: public VM_Operation {
private:
  GrowableArray<oop> *_class_promote_order;
  OffsetTableContigSpace* _ro_space;
  OffsetTableContigSpace* _rw_space;
  VirtualSpace* _md_vs;
  VirtualSpace* _mc_vs;

public:
  VM_PopulateDumpSharedSpace(GrowableArray<oop> *class_promote_order,
                             OffsetTableContigSpace* ro_space,
                             OffsetTableContigSpace* rw_space,
                             VirtualSpace* md_vs, VirtualSpace* mc_vs) {
    _class_promote_order = class_promote_order;
    _ro_space = ro_space;
    _rw_space = rw_space;
    _md_vs = md_vs;
    _mc_vs = mc_vs;
  }

  VMOp_Type type() const { return VMOp_PopulateDumpSharedSpace; }
  void doit() {
    Thread* THREAD = VMThread::vm_thread();
    NOT_PRODUCT(SystemDictionary::verify();)
    // The following guarantee is meant to ensure that no loader constraints
    // exist yet, since the constraints table is not shared.  This becomes
    // more important now that we don't re-initialize vtables/itables for
    // shared classes at runtime, where constraints were previously created.
    guarantee(SystemDictionary::constraints()->number_of_entries() == 0,
              "loader constraints are not saved");
    GenCollectedHeap* gch = GenCollectedHeap::heap();

    // At this point, many classes have been loaded.

    // Update all the fingerprints in the shared methods.

    tty->print("Calculating fingerprints ... ");
    FingerprintMethodsClosure fpmc;
    gch->object_iterate(&fpmc);
    tty->print_cr("done. ");

    // Remove all references outside the heap.

    tty->print("Removing unshareable information ... ");
    RemoveUnshareableInfoClosure ruic;
    gch->object_iterate(&ruic);
    tty->print_cr("done. ");

    // Move the objects in three passes.

    MarkObjectsOopClosure mark_all;
    MarkCommonReadOnly mark_common_ro;
    MarkCommonSymbols mark_common_symbols;
    MarkStringValues mark_string_values;
    MarkReadWriteObjects mark_rw;
    MarkStringObjects mark_strings;
    MoveMarkedObjects move_ro(_ro_space, true);
    MoveMarkedObjects move_rw(_rw_space, false);

    // The SharedOptimizeColdStart VM option governs the new layout
    // algorithm for promoting classes into the shared archive.
    // The general idea is to minimize cold start time by laying
    // out the objects in the order they are accessed at startup time.
    // By doing this we are trying to eliminate out-of-order accesses
    // in the shared archive.  This benefits cold startup time by making
    // disk reads as sequential as possible during class loading and
    // bootstrapping activities.  There may also be a small secondary
    // effect of better "packing" of more commonly used data on a smaller
    // number of pages, although no direct benefit has been measured from
    // this effect.
    //
    // At the class level of granularity, the promotion order is dictated
    // by the classlist file whose generation is discussed elsewhere.
    //
    // At smaller granularity, optimal ordering was determined by an
    // offline analysis of object access order in the shared archive.
    // The dbx watchpoint facility, combined with SA post-processing,
    // was used to observe common access patterns primarily during
    // classloading.  This information was used to craft the promotion
    // order seen in the following closures.
    //
    // The observed access order is mostly governed by what happens
    // in SystemDictionary::load_shared_class().  NOTE WELL - care
    // should be taken when making changes to this method, because it
    // may invalidate assumptions made about access order!
    //
    // (Ideally, there would be a better way to manage changes to
    //  the access order.  Unfortunately a generic in-VM solution for
    //  dynamically observing access order and optimizing shared
    //  archive layout is pretty difficult.  We go with the static
    //  analysis because the code is fairly mature at this point
    //  and we're betting that the access order won't change much.)

    MarkAndMoveOrderedReadOnly  mark_and_move_ordered_ro(&move_ro);
    MarkAndMoveOrderedReadWrite mark_and_move_ordered_rw(&move_rw);

    // Phase 1a: move commonly used read-only objects to the read-only space.

    if (SharedOptimizeColdStart) {
      tty->print("Moving pre-ordered read-only objects to shared space at " PTR_FORMAT " ... ",
                 _ro_space->top());
      for (int i = 0; i < _class_promote_order->length(); i++) {
        oop obj = _class_promote_order->at(i);
        mark_and_move_ordered_ro.do_object(obj);
      }
      tty->print_cr("done. ");
    }

    tty->print("Moving read-only objects to shared space at " PTR_FORMAT " ... ",
               _ro_space->top());
    gch->object_iterate(&mark_common_ro);
    gch->object_iterate(&move_ro);
    tty->print_cr("done. ");

    // Phase 1b: move commonly used symbols to the read-only space.

    tty->print("Moving common symbols to shared space at " PTR_FORMAT " ... ",
               _ro_space->top());
    gch->object_iterate(&mark_common_symbols);
    gch->object_iterate(&move_ro);
    tty->print_cr("done. ");

    // Phase 1c: move remaining symbols to the read-only space
    // (e.g. String initializers).

    tty->print("Moving remaining symbols to shared space at " PTR_FORMAT " ... ",
               _ro_space->top());
    vmSymbols::oops_do(&mark_all, true);
    gch->object_iterate(&move_ro);
    tty->print_cr("done. ");

    // Phase 1d: move String character arrays to the read-only space.

    tty->print("Moving string char arrays to shared space at " PTR_FORMAT " ... ",
               _ro_space->top());
    gch->object_iterate(&mark_string_values);
    gch->object_iterate(&move_ro);
    tty->print_cr("done. ");

    // Phase 2: move all remaining symbols to the read-only space.  The
    // remaining symbols are assumed to be string initializers no longer
    // referenced.

    void* extra_symbols = _ro_space->top();
    tty->print("Moving additional symbols to shared space at " PTR_FORMAT " ... ",
               _ro_space->top());
    SymbolTable::oops_do(&mark_all);
    gch->object_iterate(&move_ro);
    tty->print_cr("done. ");
    tty->print_cr("Read-only space ends at " PTR_FORMAT ", %d bytes.",
                  _ro_space->top(), _ro_space->used());

    // Phase 3: move read-write objects to the read-write space, except
    // Strings.

    if (SharedOptimizeColdStart) {
      tty->print("Moving pre-ordered read-write objects to shared space at " PTR_FORMAT " ... ",
                 _rw_space->top());
      for (int i = 0; i < _class_promote_order->length(); i++) {
        oop obj = _class_promote_order->at(i);
        mark_and_move_ordered_rw.do_object(obj);
      }
      tty->print_cr("done. ");
    }
    tty->print("Moving read-write objects to shared space at " PTR_FORMAT " ... ",
               _rw_space->top());
    Universe::oops_do(&mark_all, true);
    SystemDictionary::oops_do(&mark_all);
    oop tmp = Universe::arithmetic_exception_instance();
    mark_object(java_lang_Throwable::message(tmp));
    gch->object_iterate(&mark_rw);
    gch->object_iterate(&move_rw);
    tty->print_cr("done. ");

    // Phase 4: move String objects to the read-write space.

    tty->print("Moving String objects to shared space at " PTR_FORMAT " ... ",
               _rw_space->top());
    StringTable::oops_do(&mark_all);
    gch->object_iterate(&mark_strings);
    gch->object_iterate(&move_rw);
    tty->print_cr("done. ");
    tty->print_cr("Read-write space ends at " PTR_FORMAT ", %d bytes.",
                  _rw_space->top(), _rw_space->used());

#ifdef DEBUG
    // Check: scan for objects which were not moved.

    CheckRemainingObjects check_objects;
    gch->object_iterate(&check_objects);
    check_objects.status();
#endif

    // Resolve forwarding in objects and saved C++ structures
    tty->print("Updating references to shared objects ... ");
    ResolveForwardingClosure resolve;
    Universe::oops_do(&resolve);
    SystemDictionary::oops_do(&resolve);
    StringTable::oops_do(&resolve);
    SymbolTable::oops_do(&resolve);
    vmSymbols::oops_do(&resolve);

    // Set up the shared data and shared code segments.

    char* md_top = _md_vs->low();
    char* md_end = _md_vs->high();
    char* mc_top = _mc_vs->low();
    char* mc_end = _mc_vs->high();
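
    // Rough layout of the misc data (md) region from here on: the
    // vtbl_list pointer array, the generated vtable entries, the copied
    // hashtable buckets and tables, and finally the oop stream produced by
    // WriteClosure.  The misc code (mc) region receives the generated
    // vtable method stubs.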

    // Reserve space for the list of klassOops whose vtables are used
    // for patching others as needed.

    void** vtbl_list = (void**)md_top;
    int vtbl_list_size = CompactingPermGenGen::vtbl_list_size;
    Universe::init_self_patching_vtbl_list(vtbl_list, vtbl_list_size);

    md_top += vtbl_list_size * sizeof(void*);
    void* vtable = md_top;

    // Reserve space for a new dummy vtable for klass objects in the
    // heap.  Generate self-patching vtable entries.

    CompactingPermGenGen::generate_vtable_methods(vtbl_list,
                                                  &vtable,
                                                  &md_top, md_end,
                                                  &mc_top, mc_end);

    // Fix (forward) all of the references in these shared objects (which
    // are required to point ONLY to objects in the shared spaces).
    // Also, create a list of all objects which might later contain a
    // reference to a younger generation object.

    CompactingPermGenGen* gen = (CompactingPermGenGen*)gch->perm_gen();
    PatchOopsClosure patch(THREAD);
    gen->ro_space()->object_iterate(&patch);
    gen->rw_space()->object_iterate(&patch);

    // Previously method sorting was done concurrently with forwarding
    // pointer resolution in the shared spaces.  This imposed an ordering
    // restriction in that methods were required to be promoted/patched
    // before their holder classes.  (Because constant pool pointers in
    // methodKlasses are required to be resolved before their holder class
    // is visited for sorting, otherwise methods are sorted by incorrect,
    // pre-forwarding addresses.)
    //
    // Now, we reorder methods as a separate step after ALL forwarding
    // pointer resolution, so that methods can be promoted in any order
    // with respect to their holder classes.

    SortMethodsClosure sort(THREAD);
    gen->ro_space()->object_iterate(&sort);
    gen->rw_space()->object_iterate(&sort);
    tty->print_cr("done. ");
    tty->cr();

    // Reorder the system dictionary.  (Moving the symbol oops affects
    // how the hash table indices are calculated.)

    SystemDictionary::reorder_dictionary();

    // Empty the non-shared heap (because most of the objects were
    // copied out, and the remainder cannot be considered valid oops).

    ClearSpaceClosure csc;
    for (int i = 0; i < gch->n_gens(); ++i) {
      gch->get_gen(i)->space_iterate(&csc);
    }
    csc.do_space(gen->the_space());
    NOT_PRODUCT(SystemDictionary::verify();)

    // Copy the String table, the symbol table, and the system
    // dictionary to the shared space in usable form.  Copy the hashtable
    // buckets first [read-write], then copy the linked lists of entries
    // [read-only].

    SymbolTable::reverse(extra_symbols);
    NOT_PRODUCT(SymbolTable::verify());
    SymbolTable::copy_buckets(&md_top, md_end);

    StringTable::reverse();
    NOT_PRODUCT(StringTable::verify());
    StringTable::copy_buckets(&md_top, md_end);

    SystemDictionary::reverse();
    SystemDictionary::copy_buckets(&md_top, md_end);

    ClassLoader::verify();
    ClassLoader::copy_package_info_buckets(&md_top, md_end);
    ClassLoader::verify();

    SymbolTable::copy_table(&md_top, md_end);
    StringTable::copy_table(&md_top, md_end);
    SystemDictionary::copy_table(&md_top, md_end);
    ClassLoader::verify();
    ClassLoader::copy_package_info_table(&md_top, md_end);
    ClassLoader::verify();

    // Print debug data.

    if (PrintSharedSpaces) {
      const char* fmt = "%s space: " PTR_FORMAT " out of " PTR_FORMAT " bytes allocated at " PTR_FORMAT ".";
      tty->print_cr(fmt, "ro", _ro_space->used(), _ro_space->capacity(),
                    _ro_space->bottom());
      tty->print_cr(fmt, "rw", _rw_space->used(), _rw_space->capacity(),
                    _rw_space->bottom());
    }

    // Write the oop data to the output array.

    WriteClosure wc(md_top, md_end);
    CompactingPermGenGen::serialize_oops(&wc);
    md_top = wc.get_top();

    // Update the vtable pointers in all of the Klass objects in the
    // heap. They should point to the newly generated vtables.

    PatchKlassVtables pkvt(vtable, _md_vs);
    _rw_space->object_iterate(&pkvt);
    pkvt.patch(vtbl_list, vtbl_list_size);

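    // Save the self-patching vtable list and zero it in the image,
    // presumably so the archive carries no function addresses from this
    // particular JVM; the in-memory copy is restored (memmove below) once
    // the file has been written.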
    char* saved_vtbl = (char*)malloc(vtbl_list_size * sizeof(void*));
    memmove(saved_vtbl, vtbl_list, vtbl_list_size * sizeof(void*));
    memset(vtbl_list, 0, vtbl_list_size * sizeof(void*));

    // Create and write the archive file that maps the shared spaces.

    FileMapInfo* mapinfo = new FileMapInfo();
    mapinfo->populate_header(gch->gen_policy()->max_alignment());

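    // The regions are written in two passes: pass 1 runs the write_* calls
    // before the file is opened, which appears to serve only to compute
    // sizes and file offsets for the header, and pass 2 (after
    // open_for_write()) writes the header and region data for real.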
    // Pass 1 - update file offsets in header.
    mapinfo->write_header();
    mapinfo->write_space(CompactingPermGenGen::ro, _ro_space, true);
    _ro_space->set_saved_mark();
    mapinfo->write_space(CompactingPermGenGen::rw, _rw_space, false);
    _rw_space->set_saved_mark();
    mapinfo->write_region(CompactingPermGenGen::md, _md_vs->low(),
                          md_top - _md_vs->low(), SharedMiscDataSize,
                          false, false);
    mapinfo->write_region(CompactingPermGenGen::mc, _mc_vs->low(),
                          mc_top - _mc_vs->low(), SharedMiscCodeSize,
                          true, true);

    // Pass 2 - write data.
    mapinfo->open_for_write();
    mapinfo->write_header();
    mapinfo->write_space(CompactingPermGenGen::ro, _ro_space, true);
    mapinfo->write_space(CompactingPermGenGen::rw, _rw_space, false);
    mapinfo->write_region(CompactingPermGenGen::md, _md_vs->low(),
                          md_top - _md_vs->low(), SharedMiscDataSize,
                          false, false);
    mapinfo->write_region(CompactingPermGenGen::mc, _mc_vs->low(),
                          mc_top - _mc_vs->low(), SharedMiscCodeSize,
                          true, true);
    mapinfo->close();

    // Summarize heap.
    memmove(vtbl_list, saved_vtbl, vtbl_list_size * sizeof(void*));
    print_contents();
  }
}; // class VM_PopulateDumpSharedSpace


// Populate the shared spaces and dump to a file.

jint CompactingPermGenGen::dump_shared(GrowableArray<oop>* class_promote_order, TRAPS) {
  GenCollectedHeap* gch = GenCollectedHeap::heap();

  // Calculate hash values for all of the (interned) strings to avoid
  // writes to shared pages in the future.

  tty->print("Calculating hash values for String objects .. ");
  StringHashCodeClosure shcc(THREAD);
  StringTable::oops_do(&shcc);
  tty->print_cr("done. ");

  CompactingPermGenGen* gen = (CompactingPermGenGen*)gch->perm_gen();
  VM_PopulateDumpSharedSpace op(class_promote_order,
                                gen->ro_space(), gen->rw_space(),
                                gen->md_space(), gen->mc_space());
  VMThread::execute(&op);
  return JNI_OK;
}


class LinkClassesClosure : public ObjectClosure {
 private:
  Thread* THREAD;

 public:
  LinkClassesClosure(Thread* thread) : THREAD(thread) {}

  void do_object(oop obj) {
    if (obj->is_klass()) {
      Klass* k = Klass::cast((klassOop) obj);
      if (k->oop_is_instance()) {
        instanceKlass* ik = (instanceKlass*) k;
        // Link the class to cause the bytecodes to be rewritten and the
        // cpcache to be created.
        if (ik->get_init_state() < instanceKlass::linked) {
          ik->link_class(THREAD);
          guarantee(!HAS_PENDING_EXCEPTION, "exception in class rewriting");
        }

        // Create String objects from string initializer symbols.
        ik->constants()->resolve_string_constants(THREAD);
        guarantee(!HAS_PENDING_EXCEPTION, "exception resolving string constants");
      }
    }
  }
};


// Support for a simple checksum of the contents of the class list
// file to prevent trivial tampering. The algorithm matches that in
// the MakeClassList program used by the J2SE build process.
#define JSUM_SEED ((jlong)CONST64(0xcafebabebabecafe))
static jlong
jsum(jlong start, const char *buf, const int len)
{
    jlong h = start;
    char *p = (char *)buf, *e = p + len;
    while (p < e) {
        char c = *p++;
        if (c <= ' ') {
            /* Skip spaces and control characters */
            continue;
        }
        h = 31 * h + c;
    }
    return h;
}
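
// The class list may carry its expected checksum on its last line, in the
// form "# <hi32><lo32>" (two 8-digit hex halves of a jlong); see the sscanf
// and the computed_jsum/file_jsum comparison in preload_and_dump() below.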


// Preload classes from a list, populate the shared spaces and dump to a
// file.

void GenCollectedHeap::preload_and_dump(TRAPS) {
  TraceTime timer("Dump Shared Spaces", TraceStartupTime);
  ResourceMark rm;

  // Preload classes to be shared.
  // Should use some hpi:: method rather than fopen() here. aB.
  // Construct the path to the class list (in jre/lib)
  // Walk up two directories from the location of the VM and
  // optionally tack on "lib" (depending on platform)
  char class_list_path[JVM_MAXPATHLEN];
  os::jvm_path(class_list_path, sizeof(class_list_path));
  for (int i = 0; i < 3; i++) {
    char *end = strrchr(class_list_path, *os::file_separator());
    if (end != NULL) *end = '\0';
  }
  int class_list_path_len = (int)strlen(class_list_path);
  if (class_list_path_len >= 3) {
    if (strcmp(class_list_path + class_list_path_len - 3, "lib") != 0) {
      strcat(class_list_path, os::file_separator());
      strcat(class_list_path, "lib");
    }
  }
  strcat(class_list_path, os::file_separator());
  strcat(class_list_path, "classlist");

  FILE* file = fopen(class_list_path, "r");
  if (file != NULL) {
    jlong computed_jsum  = JSUM_SEED;
    jlong file_jsum      = 0;

    char class_name[256];
    int class_count = 0;
    GenCollectedHeap* gch = GenCollectedHeap::heap();
    gch->_preloading_shared_classes = true;
    GrowableArray<oop>* class_promote_order = new GrowableArray<oop>();

    // Preload (and intern) strings which will be used later.

    StringTable::intern("main", THREAD);
    StringTable::intern("([Ljava/lang/String;)V", THREAD);
    StringTable::intern("Ljava/lang/Class;", THREAD);

    StringTable::intern("I", THREAD);   // Needed for StringBuffer persistence?
    StringTable::intern("Z", THREAD);   // Needed for StringBuffer persistence?

    // sun.io.Converters
    static const char obj_array_sig[] = "[[Ljava/lang/Object;";
    SymbolTable::lookup(obj_array_sig, (int)strlen(obj_array_sig), THREAD);

    // java.util.HashMap
    static const char map_entry_array_sig[] = "[Ljava/util/Map$Entry;";
    SymbolTable::lookup(map_entry_array_sig, (int)strlen(map_entry_array_sig),
                        THREAD);

    tty->print("Loading classes to share ... ");
    while ((fgets(class_name, sizeof class_name, file)) != NULL) {
      if (*class_name == '#') {
        jint fsh, fsl;
        if (sscanf(class_name, "# %8x%8x\n", &fsh, &fsl) == 2) {
          file_jsum = ((jlong)(fsh) << 32) | (fsl & 0xffffffff);
        }

        continue;
      }
      // Remove trailing newline
      size_t name_len = strlen(class_name);
      class_name[name_len-1] = '\0';

      computed_jsum = jsum(computed_jsum, class_name, (const int)name_len - 1);

      // Got a class name - load it.
      symbolHandle class_name_symbol = oopFactory::new_symbol(class_name,
                                                              THREAD);
      guarantee(!HAS_PENDING_EXCEPTION, "Exception creating a symbol.");
      klassOop klass = SystemDictionary::resolve_or_null(class_name_symbol,
                                                         THREAD);
      guarantee(!HAS_PENDING_EXCEPTION, "Exception resolving a class.");
      if (klass != NULL) {
        if (PrintSharedSpaces) {
          tty->print_cr("Shared spaces preloaded: %s", class_name);
        }

        instanceKlass* ik = instanceKlass::cast(klass);

        // Should be class load order as per -XX:+TraceClassLoadingPreorder
        class_promote_order->append(ik->as_klassOop());

        // Link the class to cause the bytecodes to be rewritten and the
        // cpcache to be created. The linking is done as soon as classes
        // are loaded in order that the related data structures (klass,
        // cpCache, String constants) are located together.

        if (ik->get_init_state() < instanceKlass::linked) {
          ik->link_class(THREAD);
          guarantee(!(HAS_PENDING_EXCEPTION), "exception in class rewriting");
        }

        // Create String objects from string initializer symbols.

        ik->constants()->resolve_string_constants(THREAD);

        class_count++;
      } else {
        if (PrintSharedSpaces) {
          tty->cr();
          tty->print_cr(" Preload failed: %s", class_name);
        }
      }
      file_jsum = 0; // Checksum must be on last line of file
    }
    if (computed_jsum != file_jsum) {
      tty->cr();
      tty->print_cr("Preload failed: checksum of class list was incorrect.");
      exit(1);
    }

    tty->print_cr("done. ");

    if (PrintSharedSpaces) {
      tty->print_cr("Shared spaces: preloaded %d classes", class_count);
    }

    // Rewrite and unlink classes.
    tty->print("Rewriting and unlinking classes ... ");
    // Make heap parsable
    ensure_parsability(false); // arg is actually don't care

    // Link any classes which got missed.  (It's not quite clear why
    // they got missed.)  This iteration would be unsafe if we weren't
    // single-threaded at this point; however we can't do it on the VM
    // thread because it requires object allocation.
    LinkClassesClosure lcc(Thread::current());
    object_iterate(&lcc);
    tty->print_cr("done. ");

    // Create and dump the shared spaces.
    jint err = CompactingPermGenGen::dump_shared(class_promote_order, THREAD);
    if (err != JNI_OK) {
      fatal("Dumping shared spaces failed.");
    }

  } else {
    char errmsg[JVM_MAXPATHLEN];
    hpi::lasterror(errmsg, JVM_MAXPATHLEN);
    tty->print_cr("Loading classlist failed: %s", errmsg);
    exit(1);
  }

  // Since various initialization steps have been undone by this process,
  // it is not reasonable to continue running a java process.
  exit(0);
}