1 /* 2 * Copyright (c) 1997, 2015, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 
22 * 23 */ 24 25 #include "precompiled.hpp" 26 #include "classfile/javaClasses.hpp" 27 #include "classfile/systemDictionary.hpp" 28 #include "classfile/verifier.hpp" 29 #include "classfile/vmSymbols.hpp" 30 #include "compiler/compileBroker.hpp" 31 #include "gc_implementation/shared/markSweep.inline.hpp" 32 #include "gc_interface/collectedHeap.inline.hpp" 33 #include "interpreter/oopMapCache.hpp" 34 #include "interpreter/rewriter.hpp" 35 #include "jvmtifiles/jvmti.h" 36 #include "memory/genOopClosures.inline.hpp" 37 #include "memory/heapInspection.hpp" 38 #include "memory/iterator.inline.hpp" 39 #include "memory/metadataFactory.hpp" 40 #include "memory/oopFactory.hpp" 41 #include "memory/specialized_oop_closures.hpp" 42 #include "oops/fieldStreams.hpp" 43 #include "oops/instanceClassLoaderKlass.hpp" 44 #include "oops/instanceKlass.hpp" 45 #include "oops/instanceMirrorKlass.hpp" 46 #include "oops/instanceOop.hpp" 47 #include "oops/klass.inline.hpp" 48 #include "oops/method.hpp" 49 #include "oops/oop.inline.hpp" 50 #include "oops/symbol.hpp" 51 #include "prims/jvmtiExport.hpp" 52 #include "prims/jvmtiRedefineClassesTrace.hpp" 53 #include "prims/jvmtiRedefineClasses.hpp" 54 #include "prims/jvmtiThreadState.hpp" 55 #include "prims/methodComparator.hpp" 56 #include "runtime/atomic.inline.hpp" 57 #include "runtime/fieldDescriptor.hpp" 58 #include "runtime/handles.inline.hpp" 59 #include "runtime/javaCalls.hpp" 60 #include "runtime/mutexLocker.hpp" 61 #include "runtime/orderAccess.inline.hpp" 62 #include "runtime/thread.inline.hpp" 63 #include "services/classLoadingService.hpp" 64 #include "services/threadService.hpp" 65 #include "utilities/dtrace.hpp" 66 #include "utilities/macros.hpp" 67 #if INCLUDE_ALL_GCS 68 #include "gc_implementation/concurrentMarkSweep/cmsOopClosures.inline.hpp" 69 #include "gc_implementation/g1/g1CollectedHeap.inline.hpp" 70 #include "gc_implementation/g1/g1OopClosures.inline.hpp" 71 #include "gc_implementation/g1/g1RemSet.inline.hpp" 72 #include 
"gc_implementation/g1/heapRegionManager.inline.hpp" 73 #include "gc_implementation/parNew/parOopClosures.inline.hpp" 74 #include "gc_implementation/parallelScavenge/parallelScavengeHeap.inline.hpp" 75 #include "gc_implementation/parallelScavenge/psPromotionManager.inline.hpp" 76 #include "gc_implementation/parallelScavenge/psScavenge.inline.hpp" 77 #include "oops/oop.pcgc.inline.hpp" 78 #endif // INCLUDE_ALL_GCS 79 #ifdef COMPILER1 80 #include "c1/c1_Compiler.hpp" 81 #endif 82 83 PRAGMA_FORMAT_MUTE_WARNINGS_FOR_GCC 84 85 #ifdef DTRACE_ENABLED 86 87 88 #define HOTSPOT_CLASS_INITIALIZATION_required HOTSPOT_CLASS_INITIALIZATION_REQUIRED 89 #define HOTSPOT_CLASS_INITIALIZATION_recursive HOTSPOT_CLASS_INITIALIZATION_RECURSIVE 90 #define HOTSPOT_CLASS_INITIALIZATION_concurrent HOTSPOT_CLASS_INITIALIZATION_CONCURRENT 91 #define HOTSPOT_CLASS_INITIALIZATION_erroneous HOTSPOT_CLASS_INITIALIZATION_ERRONEOUS 92 #define HOTSPOT_CLASS_INITIALIZATION_super__failed HOTSPOT_CLASS_INITIALIZATION_SUPER_FAILED 93 #define HOTSPOT_CLASS_INITIALIZATION_clinit HOTSPOT_CLASS_INITIALIZATION_CLINIT 94 #define HOTSPOT_CLASS_INITIALIZATION_error HOTSPOT_CLASS_INITIALIZATION_ERROR 95 #define HOTSPOT_CLASS_INITIALIZATION_end HOTSPOT_CLASS_INITIALIZATION_END 96 #define DTRACE_CLASSINIT_PROBE(type, clss, thread_type) \ 97 { \ 98 char* data = NULL; \ 99 int len = 0; \ 100 Symbol* name = (clss)->name(); \ 101 if (name != NULL) { \ 102 data = (char*)name->bytes(); \ 103 len = name->utf8_length(); \ 104 } \ 105 HOTSPOT_CLASS_INITIALIZATION_##type( \ 106 data, len, (clss)->class_loader(), thread_type); \ 107 } 108 109 #define DTRACE_CLASSINIT_PROBE_WAIT(type, clss, thread_type, wait) \ 110 { \ 111 char* data = NULL; \ 112 int len = 0; \ 113 Symbol* name = (clss)->name(); \ 114 if (name != NULL) { \ 115 data = (char*)name->bytes(); \ 116 len = name->utf8_length(); \ 117 } \ 118 HOTSPOT_CLASS_INITIALIZATION_##type( \ 119 data, len, (clss)->class_loader(), thread_type, wait); \ 120 } 121 122 #else // 
ndef DTRACE_ENABLED 123 124 #define DTRACE_CLASSINIT_PROBE(type, clss, thread_type) 125 #define DTRACE_CLASSINIT_PROBE_WAIT(type, clss, thread_type, wait) 126 127 #endif // ndef DTRACE_ENABLED 128 129 volatile int InstanceKlass::_total_instanceKlass_count = 0; 130 131 InstanceKlass* InstanceKlass::allocate_instance_klass( 132 ClassLoaderData* loader_data, 133 int vtable_len, 134 int itable_len, 135 int static_field_size, 136 int nonstatic_oop_map_size, 137 ReferenceType rt, 138 AccessFlags access_flags, 139 Symbol* name, 140 Klass* super_klass, 141 bool is_anonymous, 142 TRAPS) { 143 144 int size = InstanceKlass::size(vtable_len, itable_len, nonstatic_oop_map_size, 145 access_flags.is_interface(), is_anonymous); 146 147 // Allocation 148 InstanceKlass* ik; 149 if (rt == REF_NONE) { 150 if (name == vmSymbols::java_lang_Class()) { 151 ik = new (loader_data, size, THREAD) InstanceMirrorKlass( 152 vtable_len, itable_len, static_field_size, nonstatic_oop_map_size, rt, 153 access_flags, is_anonymous); 154 } else if (name == vmSymbols::java_lang_ClassLoader() || 155 (SystemDictionary::ClassLoader_klass_loaded() && 156 super_klass != NULL && 157 super_klass->is_subtype_of(SystemDictionary::ClassLoader_klass()))) { 158 ik = new (loader_data, size, THREAD) InstanceClassLoaderKlass( 159 vtable_len, itable_len, static_field_size, nonstatic_oop_map_size, rt, 160 access_flags, is_anonymous); 161 } else { 162 // normal class 163 ik = new (loader_data, size, THREAD) InstanceKlass( 164 vtable_len, itable_len, static_field_size, nonstatic_oop_map_size, rt, 165 access_flags, is_anonymous); 166 } 167 } else { 168 // reference klass 169 ik = new (loader_data, size, THREAD) InstanceRefKlass( 170 vtable_len, itable_len, static_field_size, nonstatic_oop_map_size, rt, 171 access_flags, is_anonymous); 172 } 173 174 // Check for pending exception before adding to the loader data and incrementing 175 // class count. Can get OOM here. 
176 if (HAS_PENDING_EXCEPTION) { 177 return NULL; 178 } 179 180 // Add all classes to our internal class loader list here, 181 // including classes in the bootstrap (NULL) class loader. 182 loader_data->add_class(ik); 183 184 Atomic::inc(&_total_instanceKlass_count); 185 return ik; 186 } 187 188 189 // copy method ordering from resource area to Metaspace 190 void InstanceKlass::copy_method_ordering(intArray* m, TRAPS) { 191 if (m != NULL) { 192 // allocate a new array and copy contents (memcpy?) 193 _method_ordering = MetadataFactory::new_array<int>(class_loader_data(), m->length(), CHECK); 194 for (int i = 0; i < m->length(); i++) { 195 _method_ordering->at_put(i, m->at(i)); 196 } 197 } else { 198 _method_ordering = Universe::the_empty_int_array(); 199 } 200 } 201 202 // create a new array of vtable_indices for default methods 203 Array<int>* InstanceKlass::create_new_default_vtable_indices(int len, TRAPS) { 204 Array<int>* vtable_indices = MetadataFactory::new_array<int>(class_loader_data(), len, CHECK_NULL); 205 assert(default_vtable_indices() == NULL, "only create once"); 206 set_default_vtable_indices(vtable_indices); 207 return vtable_indices; 208 } 209 210 InstanceKlass::InstanceKlass(int vtable_len, 211 int itable_len, 212 int static_field_size, 213 int nonstatic_oop_map_size, 214 ReferenceType rt, 215 AccessFlags access_flags, 216 bool is_anonymous) { 217 No_Safepoint_Verifier no_safepoint; // until k becomes parsable 218 219 int iksize = InstanceKlass::size(vtable_len, itable_len, nonstatic_oop_map_size, 220 access_flags.is_interface(), is_anonymous); 221 222 set_vtable_length(vtable_len); 223 set_itable_length(itable_len); 224 set_static_field_size(static_field_size); 225 set_nonstatic_oop_map_size(nonstatic_oop_map_size); 226 set_access_flags(access_flags); 227 _misc_flags = 0; // initialize to zero 228 set_is_anonymous(is_anonymous); 229 assert(size() == iksize, "wrong size for object"); 230 231 set_array_klasses(NULL); 232 set_methods(NULL); 233 
set_method_ordering(NULL); 234 set_default_methods(NULL); 235 set_default_vtable_indices(NULL); 236 set_local_interfaces(NULL); 237 set_transitive_interfaces(NULL); 238 init_implementor(); 239 set_fields(NULL, 0); 240 set_constants(NULL); 241 set_class_loader_data(NULL); 242 set_source_file_name_index(0); 243 set_source_debug_extension(NULL, 0); 244 set_array_name(NULL); 245 set_inner_classes(NULL); 246 set_static_oop_field_count(0); 247 set_nonstatic_field_size(0); 248 set_is_marked_dependent(false); 249 set_has_unloaded_dependent(false); 250 set_init_state(InstanceKlass::allocated); 251 set_init_thread(NULL); 252 set_reference_type(rt); 253 set_oop_map_cache(NULL); 254 set_jni_ids(NULL); 255 set_osr_nmethods_head(NULL); 256 set_breakpoints(NULL); 257 init_previous_versions(); 258 set_generic_signature_index(0); 259 release_set_methods_jmethod_ids(NULL); 260 set_annotations(NULL); 261 set_jvmti_cached_class_field_map(NULL); 262 set_initial_method_idnum(0); 263 _dependencies = NULL; 264 set_jvmti_cached_class_field_map(NULL); 265 set_cached_class_file(NULL); 266 set_initial_method_idnum(0); 267 set_minor_version(0); 268 set_major_version(0); 269 NOT_PRODUCT(_verify_count = 0;) 270 271 // initialize the non-header words to zero 272 intptr_t* p = (intptr_t*)this; 273 for (int index = InstanceKlass::header_size(); index < iksize; index++) { 274 p[index] = NULL_WORD; 275 } 276 277 // Set temporary value until parseClassFile updates it with the real instance 278 // size. 
279 set_layout_helper(Klass::instance_layout_helper(0, true)); 280 } 281 282 283 void InstanceKlass::deallocate_methods(ClassLoaderData* loader_data, 284 Array<Method*>* methods) { 285 if (methods != NULL && methods != Universe::the_empty_method_array() && 286 !methods->is_shared()) { 287 for (int i = 0; i < methods->length(); i++) { 288 Method* method = methods->at(i); 289 if (method == NULL) continue; // maybe null if error processing 290 // Only want to delete methods that are not executing for RedefineClasses. 291 // The previous version will point to them so they're not totally dangling 292 assert (!method->on_stack(), "shouldn't be called with methods on stack"); 293 MetadataFactory::free_metadata(loader_data, method); 294 } 295 MetadataFactory::free_array<Method*>(loader_data, methods); 296 } 297 } 298 299 void InstanceKlass::deallocate_interfaces(ClassLoaderData* loader_data, 300 Klass* super_klass, 301 Array<Klass*>* local_interfaces, 302 Array<Klass*>* transitive_interfaces) { 303 // Only deallocate transitive interfaces if not empty, same as super class 304 // or same as local interfaces. See code in parseClassFile. 305 Array<Klass*>* ti = transitive_interfaces; 306 if (ti != Universe::the_empty_klass_array() && ti != local_interfaces) { 307 // check that the interfaces don't come from super class 308 Array<Klass*>* sti = (super_klass == NULL) ? NULL : 309 InstanceKlass::cast(super_klass)->transitive_interfaces(); 310 if (ti != sti && ti != NULL && !ti->is_shared()) { 311 MetadataFactory::free_array<Klass*>(loader_data, ti); 312 } 313 } 314 315 // local interfaces can be empty 316 if (local_interfaces != Universe::the_empty_klass_array() && 317 local_interfaces != NULL && !local_interfaces->is_shared()) { 318 MetadataFactory::free_array<Klass*>(loader_data, local_interfaces); 319 } 320 } 321 322 // This function deallocates the metadata and C heap pointers that the 323 // InstanceKlass points to. 
// Deallocate everything this klass owns.  The conditional frees below all
// follow the same pattern: skip NULL, skip the canonical shared empty
// sentinels (which are owned by Universe), and skip anything in the shared
// (CDS) space; then null the field so nothing dangles.
void InstanceKlass::deallocate_contents(ClassLoaderData* loader_data) {

  // Orphan the mirror first, CMS thinks it's still live.
  if (java_mirror() != NULL) {
    java_lang_Class::set_klass(java_mirror(), NULL);
  }

  // Need to take this class off the class loader data list.
  loader_data->remove_class(this);

  // The array_klass for this class is created later, after error handling.
  // For class redefinition, we keep the original class so this scratch class
  // doesn't have an array class.  Either way, assert that there is nothing
  // to deallocate.
  assert(array_klasses() == NULL, "array classes shouldn't be created for this class yet");

  // Release C heap allocated data that this might point to, which includes
  // reference counting symbol names.
  release_C_heap_structures();

  deallocate_methods(loader_data, methods());
  set_methods(NULL);

  if (method_ordering() != NULL &&
      method_ordering() != Universe::the_empty_int_array() &&
      !method_ordering()->is_shared()) {
    MetadataFactory::free_array<int>(loader_data, method_ordering());
  }
  set_method_ordering(NULL);

  // default methods can be empty
  if (default_methods() != NULL &&
      default_methods() != Universe::the_empty_method_array() &&
      !default_methods()->is_shared()) {
    MetadataFactory::free_array<Method*>(loader_data, default_methods());
  }
  // Do NOT deallocate the default methods, they are owned by superinterfaces.
  // (Only the array holding the pointers is freed above.)
  set_default_methods(NULL);

  // default methods vtable indices can be empty
  if (default_vtable_indices() != NULL &&
      !default_vtable_indices()->is_shared()) {
    MetadataFactory::free_array<int>(loader_data, default_vtable_indices());
  }
  set_default_vtable_indices(NULL);


  // This array is in Klass, but remove it with the InstanceKlass since
  // this place would be the only caller and it can share memory with transitive
  // interfaces.
  if (secondary_supers() != NULL &&
      secondary_supers() != Universe::the_empty_klass_array() &&
      secondary_supers() != transitive_interfaces() &&
      !secondary_supers()->is_shared()) {
    MetadataFactory::free_array<Klass*>(loader_data, secondary_supers());
  }
  set_secondary_supers(NULL);

  deallocate_interfaces(loader_data, super(), local_interfaces(), transitive_interfaces());
  set_transitive_interfaces(NULL);
  set_local_interfaces(NULL);

  if (fields() != NULL && !fields()->is_shared()) {
    MetadataFactory::free_array<jushort>(loader_data, fields());
  }
  set_fields(NULL, 0);

  // If a method from a redefined class is using this constant pool, don't
  // delete it, yet.  The new class's previous version will point to this.
  if (constants() != NULL) {
    assert (!constants()->on_stack(), "shouldn't be called if anything is onstack");
    if (!constants()->is_shared()) {
      MetadataFactory::free_metadata(loader_data, constants());
    }
    set_constants(NULL);
  }

  if (inner_classes() != NULL &&
      inner_classes() != Universe::the_empty_short_array() &&
      !inner_classes()->is_shared()) {
    MetadataFactory::free_array<jushort>(loader_data, inner_classes());
  }
  set_inner_classes(NULL);

  // We should deallocate the Annotations instance if it's not in shared spaces.
  if (annotations() != NULL && !annotations()->is_shared()) {
    MetadataFactory::free_metadata(loader_data, annotations());
  }
  set_annotations(NULL);
}

// A class needs initialization exactly when it is not yet fully initialized.
bool InstanceKlass::should_be_initialized() const {
  return !is_initialized();
}

// Build a fresh klassVtable view over this klass's embedded vtable.
// NOTE(review): returned object appears to be resource-allocated (callers
// never delete it) — confirm against klassVtable's declaration.
klassVtable* InstanceKlass::vtable() const {
  return new klassVtable(this, start_of_vtable(), vtable_length() / vtableEntry::size());
}

// Build a fresh klassItable view; same ownership caveat as vtable() above.
klassItable* InstanceKlass::itable() const {
  return new klassItable(instanceKlassHandle(this));
}

// Opportunistically mark a class fully_initialized when it provably has no
// observable initialization side effects (guarded by -XX:+EagerInitialization).
void InstanceKlass::eager_initialize(Thread *thread) {
  if (!EagerInitialization) return;

  if (this->is_not_initialized()) {
    // abort if the class has a class initializer
    if (this->class_initializer() != NULL) return;

    // abort if it is java.lang.Object (initialization is handled in genesis)
    Klass* super = this->super();
    if (super == NULL) return;

    // abort if the super class should be initialized
    if (!InstanceKlass::cast(super)->is_initialized()) return;

    // call body to expose the this pointer
    instanceKlassHandle this_k(thread, this);
    eager_initialize_impl(this_k);
  }
}

// JVMTI spec thinks there are signers and protection domain in the
// instanceKlass.  These accessors pretend these fields are there.
// The hprof specification also thinks these fields are in InstanceKlass.
oop InstanceKlass::protection_domain() const {
  // return the protection_domain from the mirror
  return java_lang_Class::protection_domain(java_mirror());
}

// To remove these from requires an incompatible change and CCC request.
objArrayOop InstanceKlass::signers() const {
  // return the signers from the mirror
  return java_lang_Class::signers(java_mirror());
}

// Fetch the initialization lock stored in the mirror.  May legitimately be
// NULL once the class is fully initialized (see fence_and_clear_init_lock).
oop InstanceKlass::init_lock() const {
  // return the init lock from the mirror
  oop lock = java_lang_Class::init_lock(java_mirror());
  // Prevent reordering with any access of initialization state
  OrderAccess::loadload();
  assert((oop)lock != NULL || !is_not_initialized(), // initialized or in_error state
         "only fully initialized state can have a null lock");
  return lock;
}

// Set the initialization lock to null so the object can be GC'ed.  Any racing
// threads to get this lock will see a null lock and will not lock.
// That's okay because they all check for initialized state after getting
// the lock and return.
void InstanceKlass::fence_and_clear_init_lock() {
  // make sure previous stores are all done, notably the init_state.
  OrderAccess::storestore();
  java_lang_Class::set_init_lock(java_mirror(), NULL);
  assert(!is_not_initialized(), "class must be initialized now");
}

// Body of eager_initialize(): links the class under the init lock and, on
// success, promotes it straight to fully_initialized (no <clinit> to run —
// the caller already checked that).
void InstanceKlass::eager_initialize_impl(instanceKlassHandle this_k) {
  EXCEPTION_MARK;
  oop init_lock = this_k->init_lock();
  ObjectLocker ol(init_lock, THREAD, init_lock != NULL);

  // abort if someone beat us to the initialization
  if (!this_k->is_not_initialized()) return;  // note: not equivalent to is_initialized()

  ClassState old_state = this_k->init_state();
  link_class_impl(this_k, true, THREAD);
  if (HAS_PENDING_EXCEPTION) {
    CLEAR_PENDING_EXCEPTION;
    // Abort if linking the class throws an exception.

    // Use a test to avoid redundantly resetting the state if there's
    // no change.  Set_init_state() asserts that state changes make
    // progress, whereas here we might just be spinning in place.
    if( old_state != this_k->_init_state )
      this_k->set_init_state (old_state);
  } else {
    // linking successful, mark class as initialized
    this_k->set_init_state (fully_initialized);
    this_k->fence_and_clear_init_lock();
    // trace
    if (TraceClassInitialization) {
      ResourceMark rm(THREAD);
      tty->print_cr("[Initialized %s without side effects]", this_k->external_name());
    }
  }
}


// See "The Virtual Machine Specification" section 2.16.5 for a detailed explanation of the class initialization
// process.  The step comments refers to the procedure described in that section.
// Note: implementation moved to static method to expose the this pointer.
void InstanceKlass::initialize(TRAPS) {
  if (this->should_be_initialized()) {
    HandleMark hm(THREAD);
    instanceKlassHandle this_k(THREAD, this);
    initialize_impl(this_k, CHECK);
    // Note: at this point the class may be initialized
    //       OR it may be in the state of being initialized
    //       in case of recursive initialization!
  } else {
    assert(is_initialized(), "sanity check");
  }
}


// Run the bytecode verifier over this class.  throw_verifyerror selects
// whether a failure raises VerifyError or just returns false.
bool InstanceKlass::verify_code(
    instanceKlassHandle this_k, bool throw_verifyerror, TRAPS) {
  // 1) Verify the bytecodes
  Verifier::Mode mode =
    throw_verifyerror ? Verifier::ThrowException : Verifier::NoException;
  return Verifier::verify(this_k, mode, this_k->should_verify_class(), THREAD);
}


// Used exclusively by the shared spaces dump mechanism to prevent
// classes mapped into the shared regions in new VMs from appearing linked.
// Knock a linked class back to "loaded" (CDS dump support only; see above).
void InstanceKlass::unlink_class() {
  assert(is_linked(), "must be linked");
  _init_state = loaded;
}

// Link this class, throwing on failure (VerifyError etc. propagate).
void InstanceKlass::link_class(TRAPS) {
  assert(is_loaded(), "must be loaded");
  if (!is_linked()) {
    HandleMark hm(THREAD);
    instanceKlassHandle this_k(THREAD, this);
    link_class_impl(this_k, true, CHECK);
  }
}

// Called to verify that a class can link during initialization, without
// throwing a VerifyError.
bool InstanceKlass::link_class_or_fail(TRAPS) {
  assert(is_loaded(), "must be loaded");
  if (!is_linked()) {
    HandleMark hm(THREAD);
    instanceKlassHandle this_k(THREAD, this);
    link_class_impl(this_k, false, CHECK_false);
  }
  return is_linked();
}

// The linking workhorse: recursively links the superclass and all local
// interfaces first, then (under the init lock) verifies, rewrites, links
// methods, and initializes the vtable/itable before marking the class
// "linked" and posting the JVMTI ClassPrepare event.
// Returns true on success; false (or a pending exception) on failure.
bool InstanceKlass::link_class_impl(
    instanceKlassHandle this_k, bool throw_verifyerror, TRAPS) {
  // check for error state
  if (this_k->is_in_error_state()) {
    ResourceMark rm(THREAD);
    THROW_MSG_(vmSymbols::java_lang_NoClassDefFoundError(),
               this_k->external_name(), false);
  }
  // return if already verified
  if (this_k->is_linked()) {
    return true;
  }

  // Timing
  // timer handles recursion
  assert(THREAD->is_Java_thread(), "non-JavaThread in link_class_impl");
  JavaThread* jt = (JavaThread*)THREAD;

  // link super class before linking this class
  instanceKlassHandle super(THREAD, this_k->super());
  if (super.not_null()) {
    if (super->is_interface()) {  // check if super class is an interface
      ResourceMark rm(THREAD);
      Exceptions::fthrow(
        THREAD_AND_LOCATION,
        vmSymbols::java_lang_IncompatibleClassChangeError(),
        "class %s has interface %s as super class",
        this_k->external_name(),
        super->external_name()
      );
      return false;
    }

    link_class_impl(super, throw_verifyerror, CHECK_false);
  }

  // link all interfaces implemented by this class before linking this class
  Array<Klass*>* interfaces = this_k->local_interfaces();
  int num_interfaces = interfaces->length();
  for (int index = 0; index < num_interfaces; index++) {
    HandleMark hm(THREAD);
    instanceKlassHandle ih(THREAD, interfaces->at(index));
    link_class_impl(ih, throw_verifyerror, CHECK_false);
  }

  // in case the class is linked in the process of linking its superclasses
  if (this_k->is_linked()) {
    return true;
  }

  // trace only the link time for this klass that includes
  // the verification time
  PerfClassTraceTime vmtimer(ClassLoader::perf_class_link_time(),
                             ClassLoader::perf_class_link_selftime(),
                             ClassLoader::perf_classes_linked(),
                             jt->get_thread_stat()->perf_recursion_counts_addr(),
                             jt->get_thread_stat()->perf_timers_addr(),
                             PerfClassTraceTime::CLASS_LINK);

  // verification & rewriting
  {
    oop init_lock = this_k->init_lock();
    ObjectLocker ol(init_lock, THREAD, init_lock != NULL);
    // rewritten will have been set if loader constraint error found
    // on an earlier link attempt
    // don't verify or rewrite if already rewritten

    if (!this_k->is_linked()) {
      if (!this_k->is_rewritten()) {
        {
          // Timer includes any side effects of class verification (resolution,
          // etc), but not recursive entry into verify_code().
          PerfClassTraceTime timer(ClassLoader::perf_class_verify_time(),
                                   ClassLoader::perf_class_verify_selftime(),
                                   ClassLoader::perf_classes_verified(),
                                   jt->get_thread_stat()->perf_recursion_counts_addr(),
                                   jt->get_thread_stat()->perf_timers_addr(),
                                   PerfClassTraceTime::CLASS_VERIFY);
          bool verify_ok = verify_code(this_k, throw_verifyerror, THREAD);
          if (!verify_ok) {
            return false;
          }
        }

        // Just in case a side-effect of verify linked this class already
        // (which can sometimes happen since the verifier loads classes
        // using custom class loaders, which are free to initialize things)
        if (this_k->is_linked()) {
          return true;
        }

        // also sets rewritten
        this_k->rewrite_class(CHECK_false);
      }

      // relocate jsrs and link methods after they are all rewritten
      this_k->link_methods(CHECK_false);

      // Initialize the vtable and interface table after
      // methods have been rewritten since rewrite may
      // fabricate new Method*s.
      // also does loader constraint checking
      if (!this_k()->is_shared()) {
        ResourceMark rm(THREAD);
        this_k->vtable()->initialize_vtable(true, CHECK_false);
        this_k->itable()->initialize_itable(true, CHECK_false);
      }
#ifdef ASSERT
      else {
        ResourceMark rm(THREAD);
        this_k->vtable()->verify(tty, true);
        // In case itable verification is ever added.
        // this_k->itable()->verify(tty, true);
      }
#endif
      this_k->set_init_state(linked);
      if (JvmtiExport::should_post_class_prepare()) {
        Thread *thread = THREAD;
        assert(thread->is_Java_thread(), "thread->is_Java_thread()");
        JvmtiExport::post_class_prepare((JavaThread *) thread, this_k());
      }
    }
  }
  return true;
}


// Rewrite the byte codes of all of the methods of a class.
// The rewriter must be called exactly once.  Rewriting must happen after
// verification but before the first method of the class is executed.
void InstanceKlass::rewrite_class(TRAPS) {
  assert(is_loaded(), "must be loaded");
  instanceKlassHandle this_k(THREAD, this);
  if (this_k->is_rewritten()) {
    // Shared (CDS) classes arrive pre-rewritten; anything else must not be.
    assert(this_k()->is_shared(), "rewriting an unshared class?");
    return;
  }
  Rewriter::rewrite(this_k, CHECK);
  this_k->set_rewritten();
}

// Now relocate and link method entry points after class is rewritten.
// This is outside is_rewritten flag.  In case of an exception, it can be
// executed more than once.
void InstanceKlass::link_methods(TRAPS) {
  int len = methods()->length();
  for (int i = len-1; i >= 0; i--) {
    methodHandle m(THREAD, methods()->at(i));

    // Set up method entry points for compiler and interpreter    .
    m->link_method(m, CHECK);

    // This is for JVMTI and unrelated to relocator but the last thing we do
#ifdef ASSERT
    // Stress mode: compare each method against its few predecessors to
    // exercise MethodComparator; a method must always be EMCP to itself.
    if (StressMethodComparator) {
      ResourceMark rm(THREAD);
      static int nmc = 0;
      for (int j = i; j >= 0 && j >= i-4; j--) {
        if ((++nmc % 1000) == 0)  tty->print_cr("Have run MethodComparator %d times...", nmc);
        bool z = MethodComparator::methods_EMCP(m(),
                   methods()->at(j));
        if (j == i && !z) {
          tty->print("MethodComparator FAIL: "); m->print(); m->print_codes();
          assert(z, "method must compare equal to itself");
        }
      }
    }
#endif //ASSERT
  }
}

// Eagerly initialize superinterfaces that declare default methods (concrete instance: any access)
void InstanceKlass::initialize_super_interfaces(instanceKlassHandle this_k, TRAPS) {
  if (this_k->has_default_methods()) {
    for (int i = 0; i < this_k->local_interfaces()->length(); ++i) {
      Klass* iface = this_k->local_interfaces()->at(i);
      InstanceKlass* ik = InstanceKlass::cast(iface);
      if (ik->should_be_initialized()) {
        // Recurse into superinterfaces first (depth-first).
        if (ik->has_default_methods()) {
          ik->initialize_super_interfaces(ik, THREAD);
        }
        // Only initialize() interfaces that "declare" concrete methods.
        // has_default_methods drives searching superinterfaces since it
        // means has_default_methods in its superinterface hierarchy
        if (!HAS_PENDING_EXCEPTION && ik->declares_default_methods()) {
          ik->initialize(THREAD);
        }
        if (HAS_PENDING_EXCEPTION) {
          Handle e(THREAD, PENDING_EXCEPTION);
          CLEAR_PENDING_EXCEPTION;
          {
            EXCEPTION_MARK;
            // Locks object, set state, and notify all waiting threads
            this_k->set_initialization_state_and_notify(
                initialization_error, THREAD);

            // ignore any exception thrown, superclass initialization error is
            // thrown below
            CLEAR_PENDING_EXCEPTION;
          }
          THROW_OOP(e());
        }
      }
    }
  }
}

// The JVMS class-initialization procedure; the numbered "Step" comments
// below track the steps of that specification.
void InstanceKlass::initialize_impl(instanceKlassHandle this_k, TRAPS) {
  // Make sure klass is linked (verified) before initialization
  // A class could already be verified, since it has been reflected upon.
  this_k->link_class(CHECK);

  DTRACE_CLASSINIT_PROBE(required, InstanceKlass::cast(this_k()), -1);

  bool wait = false;

  // refer to the JVM book page 47 for description of steps
  // Step 1
  {
    oop init_lock = this_k->init_lock();
    ObjectLocker ol(init_lock, THREAD, init_lock != NULL);

    Thread *self = THREAD; // it's passed the current thread

    // Step 2
    // If we were to use wait() instead of waitInterruptibly() then
    // we might end up throwing IE from link/symbol resolution sites
    // that aren't expected to throw.  This would wreak havoc.  See 6320309.
    while(this_k->is_being_initialized() && !this_k->is_reentrant_initialization(self)) {
        wait = true;
      ol.waitUninterruptibly(CHECK);
    }

    // Step 3
    if (this_k->is_being_initialized() && this_k->is_reentrant_initialization(self)) {
      DTRACE_CLASSINIT_PROBE_WAIT(recursive, InstanceKlass::cast(this_k()), -1,wait);
      return;
    }

    // Step 4
    if (this_k->is_initialized()) {
      DTRACE_CLASSINIT_PROBE_WAIT(concurrent, InstanceKlass::cast(this_k()), -1,wait);
      return;
    }

    // Step 5
    if (this_k->is_in_error_state()) {
      DTRACE_CLASSINIT_PROBE_WAIT(erroneous, InstanceKlass::cast(this_k()), -1,wait);
      ResourceMark rm(THREAD);
      const char* desc = "Could not initialize class ";
      const char* className = this_k->external_name();
      size_t msglen = strlen(desc) + strlen(className) + 1;
      char* message = NEW_RESOURCE_ARRAY(char, msglen);
      if (NULL == message) {
        // Out of memory: can't create detailed error message
        THROW_MSG(vmSymbols::java_lang_NoClassDefFoundError(), className);
      } else {
        jio_snprintf(message, msglen, "%s%s", desc, className);
        THROW_MSG(vmSymbols::java_lang_NoClassDefFoundError(), message);
      }
    }

    // Step 6
    this_k->set_init_state(being_initialized);
    this_k->set_init_thread(self);
  }

  // Step 7
  Klass* super_klass = this_k->super();
  if (super_klass != NULL && !this_k->is_interface() && super_klass->should_be_initialized()) {
    super_klass->initialize(THREAD);

    if (HAS_PENDING_EXCEPTION) {
      Handle e(THREAD, PENDING_EXCEPTION);
      CLEAR_PENDING_EXCEPTION;
      {
        EXCEPTION_MARK;
        this_k->set_initialization_state_and_notify(initialization_error, THREAD); // Locks object, set state, and notify all waiting threads
        CLEAR_PENDING_EXCEPTION;   // ignore any exception thrown, superclass initialization error is thrown below
      }
      DTRACE_CLASSINIT_PROBE_WAIT(super__failed, InstanceKlass::cast(this_k()), -1,wait);
      THROW_OOP(e());
    }
  }

  // Recursively initialize any superinterfaces that declare default methods
  // Only need to recurse if has_default_methods which includes declaring and
  // inheriting default methods
  if (this_k->has_default_methods()) {
    this_k->initialize_super_interfaces(this_k, CHECK);
  }

  // Step 8
  {
    assert(THREAD->is_Java_thread(), "non-JavaThread in initialize_impl");
    JavaThread* jt = (JavaThread*)THREAD;
    DTRACE_CLASSINIT_PROBE_WAIT(clinit, InstanceKlass::cast(this_k()), -1,wait);
    // Timer includes any side effects of class initialization (resolution,
    // etc), but not recursive entry into call_class_initializer().
    PerfClassTraceTime timer(ClassLoader::perf_class_init_time(),
                             ClassLoader::perf_class_init_selftime(),
                             ClassLoader::perf_classes_inited(),
                             jt->get_thread_stat()->perf_recursion_counts_addr(),
                             jt->get_thread_stat()->perf_timers_addr(),
                             PerfClassTraceTime::CLASS_CLINIT);
    this_k->call_class_initializer(THREAD);
  }

  // Step 9
  if (!HAS_PENDING_EXCEPTION) {
    this_k->set_initialization_state_and_notify(fully_initialized, CHECK);
    { ResourceMark rm(THREAD);
      debug_only(this_k->vtable()->verify(tty, true);)
    }
  }
  else {
    // Step 10 and 11
    Handle e(THREAD, PENDING_EXCEPTION);
    CLEAR_PENDING_EXCEPTION;
    // JVMTI has already reported the pending exception
    // JVMTI internal flag reset is needed in order to report ExceptionInInitializerError
    JvmtiExport::clear_detected_exception((JavaThread*)THREAD);
    {
      EXCEPTION_MARK;
      this_k->set_initialization_state_and_notify(initialization_error, THREAD);
      CLEAR_PENDING_EXCEPTION;   // ignore any exception thrown, class initialization error is thrown below
      // JVMTI has already reported the pending exception
      // JVMTI internal flag reset is needed in order to report ExceptionInInitializerError
      JvmtiExport::clear_detected_exception((JavaThread*)THREAD);
    }
    DTRACE_CLASSINIT_PROBE_WAIT(error, InstanceKlass::cast(this_k()), -1,wait);
    if (e->is_a(SystemDictionary::Error_klass())) {
      THROW_OOP(e());
    } else {
      JavaCallArguments args(e);
      THROW_ARG(vmSymbols::java_lang_ExceptionInInitializerError(),
                vmSymbols::throwable_void_signature(),
                &args);
    }
  }
  DTRACE_CLASSINIT_PROBE_WAIT(end, InstanceKlass::cast(this_k()), -1,wait);
}


// Note: implementation moved to static method to expose the this pointer.
void InstanceKlass::set_initialization_state_and_notify(ClassState state, TRAPS) {
  instanceKlassHandle kh(THREAD, this);
  set_initialization_state_and_notify_impl(kh, state, CHECK);
}

// Under the init lock: publish the new state, clear the lock, and wake
// every thread blocked in Step 2 of initialize_impl.
void InstanceKlass::set_initialization_state_and_notify_impl(instanceKlassHandle this_k, ClassState state, TRAPS) {
  oop init_lock = this_k->init_lock();
  ObjectLocker ol(init_lock, THREAD, init_lock != NULL);
  this_k->set_init_state(state);
  this_k->fence_and_clear_init_lock();
  ol.notify_all(CHECK);
}

// The embedded _implementor field can only record one implementor.
// When there are more than one implementors, the _implementor field
// is set to the interface Klass* itself.  Following are the possible
// values for the _implementor field:
//   NULL                  - no implementor
//   implementor Klass*    - one implementor
//   self                  - more than one implementor
//
// The _implementor field only exists for interfaces.
void InstanceKlass::add_implementor(Klass* k) {
  assert(Compile_lock->owned_by_self(), "");
  assert(is_interface(), "not interface");
  // Filter out my subinterfaces.
  // (Note: Interfaces are never on the subklass list.)
  if (InstanceKlass::cast(k)->is_interface()) return;

  // Filter out subclasses whose supers already implement me.
  // (Note: CHA must walk subclasses of direct implementors
  // in order to locate indirect implementors.)
  Klass* sk = InstanceKlass::cast(k)->super();
  if (sk != NULL && InstanceKlass::cast(sk)->implements_interface(this))
    // We only need to check one immediate superclass, since the
    // implements_interface query looks at transitive_interfaces.
    // Any supers of the super have the same (or fewer) transitive_interfaces.
    return;

  Klass* ik = implementor();
  if (ik == NULL) {
    set_implementor(k);
  } else if (ik != this) {
    // There is already an implementor. Use itself as an indicator of
    // more than one implementors.
    set_implementor(this);
  }

  // The implementor also implements the transitive_interfaces
  for (int index = 0; index < local_interfaces()->length(); index++) {
    InstanceKlass::cast(local_interfaces()->at(index))->add_implementor(k);
  }
}

// Reset the implementor link. Only interfaces carry an _implementor field,
// so this is a no-op for ordinary classes.
void InstanceKlass::init_implementor() {
  if (is_interface()) {
    set_implementor(NULL);
  }
}


void InstanceKlass::process_interfaces(Thread *thread) {
  // link this class into the implementors list of every interface it implements
  for (int i = local_interfaces()->length() - 1; i >= 0; i--) {
    assert(local_interfaces()->at(i)->is_klass(), "must be a klass");
    InstanceKlass* interf = InstanceKlass::cast(local_interfaces()->at(i));
    assert(interf->is_interface(), "expected interface");
    interf->add_implementor(this);
  }
}

// Interfaces can never occupy a primary super slot; defer to the
// generic Klass check otherwise.
bool InstanceKlass::can_be_primary_super_slow() const {
  if (is_interface())
    return false;
  else
    return Klass::can_be_primary_super_slow();
}

// Builds (or shares) the secondary supers list for this klass.
// Returns NULL when set_secondary_supers() was already called with a
// shareable array, or a GrowableArray the caller must finish when extra
// slots are requested.
GrowableArray<Klass*>* InstanceKlass::compute_secondary_supers(int num_extra_slots) {
  // The secondaries are the implemented interfaces.
  InstanceKlass* ik = InstanceKlass::cast(this);
  Array<Klass*>* interfaces = ik->transitive_interfaces();
  int num_secondaries = num_extra_slots + interfaces->length();
  if (num_secondaries == 0) {
    // Must share this for correct bootstrapping!
    set_secondary_supers(Universe::the_empty_klass_array());
    return NULL;
  } else if (num_extra_slots == 0) {
    // The secondary super list is exactly the same as the transitive interfaces.
    // Redefine classes has to be careful not to delete this!
    set_secondary_supers(interfaces);
    return NULL;
  } else {
    // Copy transitive interfaces to a temporary growable array to be constructed
    // into the secondary super list with extra slots.
    GrowableArray<Klass*>* secondaries = new GrowableArray<Klass*>(interfaces->length());
    for (int i = 0; i < interfaces->length(); i++) {
      secondaries->push(interfaces->at(i));
    }
    return secondaries;
  }
}

// Subtype check: interfaces are tested via the transitive interface list,
// ordinary classes via the generic Klass algorithm.
bool InstanceKlass::compute_is_subtype_of(Klass* k) {
  if (k->is_interface()) {
    return implements_interface(k);
  } else {
    return Klass::compute_is_subtype_of(k);
  }
}

// Linear scan of transitive_interfaces(); identity compare is sufficient
// because each interface has a unique Klass*.
bool InstanceKlass::implements_interface(Klass* k) const {
  if (this == k) return true;
  assert(k->is_interface(), "should be an interface class");
  for (int i = 0; i < transitive_interfaces()->length(); i++) {
    if (transitive_interfaces()->at(i) == k) {
      return true;
    }
  }
  return false;
}

// True if k is this klass itself or one of its DIRECT superinterfaces
// (local_interfaces only — transitive interfaces are not consulted).
bool InstanceKlass::is_same_or_direct_interface(Klass *k) const {
  // Verify direct super interface
  if (this == k) return true;
  assert(k->is_interface(), "should be an interface class");
  for (int i = 0; i < local_interfaces()->length(); i++) {
    if (local_interfaces()->at(i) == k) {
      return true;
    }
  }
  return false;
}

// Allocate an n-dimensional object array of this klass with the given
// outermost length. Throws NegativeArraySizeException / OutOfMemoryError.
objArrayOop InstanceKlass::allocate_objArray(int n, int length, TRAPS) {
  if (length < 0) THROW_0(vmSymbols::java_lang_NegativeArraySizeException());
  if (length > arrayOopDesc::max_array_length(T_OBJECT)) {
    report_java_out_of_memory("Requested array size exceeds VM limit");
    JvmtiExport::post_array_size_exhausted();
    THROW_OOP_0(Universe::out_of_memory_error_array_size());
  }
  int size = objArrayOopDesc::object_size(length);
  Klass* ak = array_klass(n, CHECK_NULL);
  KlassHandle h_ak (THREAD, ak);
  objArrayOop o =
    (objArrayOop)CollectedHeap::array_allocate(h_ak, size, length, CHECK_NULL);
  return o;
}

// Registers instance i with the JDK finalizer mechanism by calling
// Finalizer.register(i) through a Java upcall; returns the (possibly moved)
// instance re-read from the handle after the call.
instanceOop InstanceKlass::register_finalizer(instanceOop i, TRAPS) {
  if (TraceFinalizerRegistration) {
    tty->print("Registered ");
    i->print_value_on(tty);
    tty->print_cr(" (" INTPTR_FORMAT ") as finalizable", (address)i);
  }
  instanceHandle h_i(THREAD, i);
  // Pass the handle as argument, JavaCalls::call expects oop as jobjects
  JavaValue result(T_VOID);
  JavaCallArguments args(h_i);
  methodHandle mh (THREAD, Universe::finalizer_register_method());
  JavaCalls::call(&result, mh, &args, CHECK_NULL);
  // Re-read through the handle: the upcall may have triggered a GC that
  // moved the object.
  return h_i();
}

// Heap-allocates a new instance of this klass, registering it with the
// finalizer if the class has a finalizer and registration is deferred.
instanceOop InstanceKlass::allocate_instance(TRAPS) {
  bool has_finalizer_flag = has_finalizer(); // Query before possible GC
  int size = size_helper();  // Query before forming handle.

  KlassHandle h_k(THREAD, this);

  instanceOop i;

  i = (instanceOop)CollectedHeap::obj_allocate(h_k, size, CHECK_NULL);
  if (has_finalizer_flag && !RegisterFinalizersAtInit) {
    i = register_finalizer(i, CHECK_NULL);
  }
  return i;
}

// Throws InstantiationError/-Exception for abstract classes and interfaces,
// and IllegalAccessError/-Exception for java.lang.Class; throwError selects
// the Error flavor (used for bytecode `new`) vs. the Exception flavor
// (used for reflective instantiation).
void InstanceKlass::check_valid_for_instantiation(bool throwError, TRAPS) {
  if (is_interface() || is_abstract()) {
    ResourceMark rm(THREAD);
    THROW_MSG(throwError ? vmSymbols::java_lang_InstantiationError()
              : vmSymbols::java_lang_InstantiationException(), external_name());
  }
  if (this == SystemDictionary::Class_klass()) {
    ResourceMark rm(THREAD);
    THROW_MSG(throwError ? vmSymbols::java_lang_IllegalAccessError()
              : vmSymbols::java_lang_IllegalAccessException(), external_name());
  }
}

Klass* InstanceKlass::array_klass_impl(bool or_null, int n, TRAPS) {
  instanceKlassHandle this_k(THREAD, this);
  return array_klass_impl(this_k, or_null, n, THREAD);
}

// Returns the n-dimensional array klass for this klass, creating the
// 1-dimensional one lazily under Compile_lock + MultiArray_lock.
// With or_null, never allocates: returns NULL when not yet created.
Klass* InstanceKlass::array_klass_impl(instanceKlassHandle this_k, bool or_null, int n, TRAPS) {
  if (this_k->array_klasses() == NULL) {
    if (or_null) return NULL;

    ResourceMark rm;
    JavaThread *jt = (JavaThread *)THREAD;
    {
      // Atomic creation of array_klasses
      MutexLocker mc(Compile_lock, THREAD);   // for vtables
      MutexLocker ma(MultiArray_lock, THREAD);

      // Check if update has already taken place
      if (this_k->array_klasses() == NULL) {
        Klass*    k = ObjArrayKlass::allocate_objArray_klass(this_k->class_loader_data(), 1, this_k, CHECK_NULL);
        this_k->set_array_klasses(k);
      }
    }
  }
  // _this will always be set at this point
  ObjArrayKlass* oak = (ObjArrayKlass*)this_k->array_klasses();
  if (or_null) {
    return oak->array_klass_or_null(n);
  }
  return oak->array_klass(n, THREAD);
}

Klass* InstanceKlass::array_klass_impl(bool or_null, TRAPS) {
  return array_klass_impl(or_null, 1, THREAD);
}

void InstanceKlass::call_class_initializer(TRAPS) {
  instanceKlassHandle ik (THREAD, this);
  call_class_initializer_impl(ik, THREAD);
}

static int call_class_initializer_impl_counter = 0;   // for debugging

// Returns this class's <clinit> method, or NULL if there is none with
// valid initializer flags.
Method* InstanceKlass::class_initializer() {
  Method* clinit = find_method(
      vmSymbols::class_initializer_name(), vmSymbols::void_method_signature());
  if (clinit != NULL && clinit->has_valid_initializer_flags()) {
    return clinit;
  }
  return NULL;
}

// Invokes this class's <clinit> (if any) via a static Java call.
void InstanceKlass::call_class_initializer_impl(instanceKlassHandle this_k, TRAPS) {
  if (ReplayCompiles &&
      (ReplaySuppressInitializers == 1 ||
       ReplaySuppressInitializers >= 2 && this_k->class_loader() != NULL)) {
    // Hide the existence of the initializer for the purpose of replaying the compile
    return;
  }

  methodHandle h_method(THREAD, this_k->class_initializer());
  assert(!this_k->is_initialized(), "we cannot initialize twice");
  if (TraceClassInitialization) {
    tty->print("%d Initializing ", call_class_initializer_impl_counter++);
    this_k->name()->print_value();
    tty->print_cr("%s (" INTPTR_FORMAT ")", h_method() == NULL ? "(no method)" : "", (address)this_k());
  }
  // A class without <clinit> is simply skipped; callers handle state changes.
  if (h_method() != NULL) {
    JavaCallArguments args; // No arguments
    JavaValue result(T_VOID);
    JavaCalls::call(&result, h_method, &args, CHECK); // Static call (no args)
  }
}


// Looks up the interpreter oop map for (method, bci), lazily creating the
// per-class OopMapCache with double-checked locking on OopMapCacheAlloc_lock.
void InstanceKlass::mask_for(methodHandle method, int bci,
  InterpreterOopMap* entry_for) {
  // Dirty read, then double-check under a lock.
  if (_oop_map_cache == NULL) {
    // Otherwise, allocate a new one.
    MutexLocker x(OopMapCacheAlloc_lock);
    // First time use. Allocate a cache in C heap
    if (_oop_map_cache == NULL) {
      // Release stores from OopMapCache constructor before assignment
      // to _oop_map_cache. C++ compilers on ppc do not emit the
      // required memory barrier only because of the volatile
      // qualifier of _oop_map_cache.
      OrderAccess::release_store_ptr(&_oop_map_cache, new OopMapCache());
    }
  }
  // _oop_map_cache is constant after init; lookup below does is own locking.
  _oop_map_cache->lookup(method, bci, entry_for);
}


// Finds a field declared in THIS class (no supers/interfaces) by exact
// name+signature; fills *fd and returns true on a hit.
bool InstanceKlass::find_local_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
  for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
    // Symbols are interned, so pointer comparison suffices.
    Symbol* f_name = fs.name();
    Symbol* f_sig  = fs.signature();
    if (f_name == name && f_sig == sig) {
      fd->reinitialize(const_cast<InstanceKlass*>(this), fs.index());
      return true;
    }
  }
  return false;
}


// Depth-first search of the direct superinterfaces for a (static) field;
// returns the declaring interface klass, or NULL if not found.
Klass* InstanceKlass::find_interface_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
  const int n = local_interfaces()->length();
  for (int i = 0; i < n; i++) {
    Klass* intf1 = local_interfaces()->at(i);
    assert(intf1->is_interface(), "just checking type");
    // search for field in current interface
    if (InstanceKlass::cast(intf1)->find_local_field(name, sig, fd)) {
      assert(fd->is_static(), "interface field must be static");
      return intf1;
    }
    // search for field in direct superinterfaces
    Klass* intf2 = InstanceKlass::cast(intf1)->find_interface_field(name, sig, fd);
    if (intf2 != NULL) return intf2;
  }
  // otherwise field lookup fails
  return NULL;
}


// Full field resolution: this class, then superinterfaces, then superclass.
// Returns the declaring klass or NULL; fills *fd on success.
Klass* InstanceKlass::find_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
  // search order according to newest JVM spec (5.4.3.2, p.167).
  // 1) search for field in current klass
  if (find_local_field(name, sig, fd)) {
    return const_cast<InstanceKlass*>(this);
  }
  // 2) search for field recursively in direct superinterfaces
  { Klass* intf = find_interface_field(name, sig, fd);
    if (intf != NULL) return intf;
  }
  // 3) apply field lookup recursively if superclass exists
  { Klass* supr = super();
    if (supr != NULL) return InstanceKlass::cast(supr)->find_field(name, sig, fd);
  }
  // 4) otherwise field lookup fails
  return NULL;
}


// Variant of find_field that also requires the field's staticness to match;
// interfaces are only consulted for static fields.
Klass* InstanceKlass::find_field(Symbol* name, Symbol* sig, bool is_static, fieldDescriptor* fd) const {
  // search order according to newest JVM spec (5.4.3.2, p.167).
  // 1) search for field in current klass
  if (find_local_field(name, sig, fd)) {
    if (fd->is_static() == is_static) return const_cast<InstanceKlass*>(this);
  }
  // 2) search for field recursively in direct superinterfaces
  if (is_static) {
    Klass* intf = find_interface_field(name, sig, fd);
    if (intf != NULL) return intf;
  }
  // 3) apply field lookup recursively if superclass exists
  { Klass* supr = super();
    if (supr != NULL) return InstanceKlass::cast(supr)->find_field(name, sig, is_static, fd);
  }
  // 4) otherwise field lookup fails
  return NULL;
}


// Finds the field declared in THIS class at the given offset with matching
// staticness; fills *fd and returns true on a hit.
bool InstanceKlass::find_local_field_from_offset(int offset, bool is_static, fieldDescriptor* fd) const {
  for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
    if (fs.offset() == offset) {
      fd->reinitialize(const_cast<InstanceKlass*>(this), fs.index());
      if (fd->is_static() == is_static) return true;
    }
  }
  return false;
}


// Like find_local_field_from_offset, but walks up the superclass chain.
bool InstanceKlass::find_field_from_offset(int offset, bool is_static, fieldDescriptor* fd) const {
  Klass* klass = const_cast<InstanceKlass*>(this);
  while (klass != NULL) {
    if (InstanceKlass::cast(klass)->find_local_field_from_offset(offset, is_static, fd)) {
      return true;
    }
    klass = klass->super();
  }
  return false;
}


// Applies f to every method declared in this class.
void InstanceKlass::methods_do(void f(Method* method)) {
  // Methods aren't stable until they are loaded.  This can be read outside
  // a lock through the ClassLoaderData for profiling
  if (!is_loaded()) {
    return;
  }

  int len = methods()->length();
  for (int index = 0; index < len; index++) {
    Method* m = methods()->at(index);
    assert(m->is_method(), "must be method");
    f(m);
  }
}


// Applies the closure to every static field declared in this class.
void InstanceKlass::do_local_static_fields(FieldClosure* cl) {
  for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
    if (fs.access_flags().is_static()) {
      fieldDescriptor& fd = fs.field_descriptor();
      cl->do_field(&fd);
    }
  }
}


void InstanceKlass::do_local_static_fields(void f(fieldDescriptor*, Handle, TRAPS), Handle mirror, TRAPS) {
  instanceKlassHandle h_this(THREAD, this);
  do_local_static_fields_impl(h_this, f, mirror, CHECK);
}


// TRAPS-capable variant: f may throw, which aborts the iteration via CHECK.
void InstanceKlass::do_local_static_fields_impl(instanceKlassHandle this_k,
                             void f(fieldDescriptor* fd, Handle, TRAPS), Handle mirror, TRAPS) {
  for (JavaFieldStream fs(this_k()); !fs.done(); fs.next()) {
    if (fs.access_flags().is_static()) {
      fieldDescriptor& fd = fs.field_descriptor();
      f(&fd, mirror, CHECK);
    }
  }
}


// qsort comparator for the (offset, field-index) pairs built below;
// sorts by the offset stored in element [0] of each pair.
static int compare_fields_by_offset(int* a, int* b) {
  return a[0] - b[0];
}

// Applies the closure to all nonstatic fields, superclass fields first,
// within each class ordered by field offset.
void InstanceKlass::do_nonstatic_fields(FieldClosure* cl) {
  InstanceKlass* super = superklass();
  if (super != NULL) {
    super->do_nonstatic_fields(cl);
  }
  fieldDescriptor fd;
  int length = java_fields_count();
  // In DebugInfo nonstatic fields are sorted by offset.
  // Scratch array of (offset, field-index) pairs, sorted by offset below.
  int* fields_sorted = NEW_C_HEAP_ARRAY(int, 2*(length+1), mtClass);
  int j = 0;
  for (int i = 0; i < length; i += 1) {
    fd.reinitialize(this, i);
    if (!fd.is_static()) {
      fields_sorted[j + 0] = fd.offset();
      fields_sorted[j + 1] = i;
      j += 2;
    }
  }
  if (j > 0) {
    length = j;
    // _sort_Fn is defined in growableArray.hpp.
    qsort(fields_sorted, length/2, 2*sizeof(int), (_sort_Fn)compare_fields_by_offset);
    for (int i = 0; i < length; i += 2) {
      fd.reinitialize(this, fields_sorted[i + 1]);
      assert(!fd.is_static() && fd.offset() == fields_sorted[i], "only nonstatic fields");
      cl->do_field(&fd);
    }
  }
  FREE_C_HEAP_ARRAY(int, fields_sorted);
}


// Applies f to every array klass derived from this klass (TRAPS variant).
void InstanceKlass::array_klasses_do(void f(Klass* k, TRAPS), TRAPS) {
  if (array_klasses() != NULL)
    ArrayKlass::cast(array_klasses())->array_klasses_do(f, THREAD);
}

void InstanceKlass::array_klasses_do(void f(Klass* k)) {
  if (array_klasses() != NULL)
    ArrayKlass::cast(array_klasses())->array_klasses_do(f);
}

#ifdef ASSERT
// Debug-only exhaustive scan used to cross-check binary_search results.
static int linear_search(Array<Method*>* methods, Symbol* name, Symbol* signature) {
  int len = methods->length();
  for (int index = 0; index < len; index++) {
    Method* m = methods->at(index);
    assert(m->is_method(), "must be method");
    if (m->signature() == signature && m->name() == name) {
      return index;
    }
  }
  return -1;
}
#endif

// Binary search by method name only (methods are sorted by name);
// returns the index of SOME method with that name, or -1.
static int binary_search(Array<Method*>* methods, Symbol* name) {
  int len = methods->length();
  // methods are sorted, so do binary search
  int l = 0;
  int h = len - 1;
  while (l <= h) {
    int mid = (l + h) >> 1;
    Method* m = methods->at(mid);
    assert(m->is_method(), "must be method");
    int res = m->name()->fast_compare(name);
    if (res == 0) {
      return mid;
    } else if (res < 0) {
      l = mid + 1;
    } else {
      h = mid - 1;
    }
  }
  return -1;
}

// find_method looks up the name/signature in the local methods array
Method* InstanceKlass::find_method(Symbol* name, Symbol* signature) const {
  return find_method_impl(name, signature, find_overpass, find_static);
}

Method* InstanceKlass::find_method_impl(Symbol* name, Symbol* signature,
    OverpassLookupMode overpass_mode, StaticLookupMode static_mode) const {
  return InstanceKlass::find_method_impl(methods(), name, signature, overpass_mode, static_mode);
}

// find_instance_method looks up the name/signature in the local methods array
// and skips over static methods
Method* InstanceKlass::find_instance_method(
    Array<Method*>* methods, Symbol* name, Symbol* signature) {
  Method* meth = InstanceKlass::find_method_impl(methods, name, signature,
                                                 find_overpass, skip_static);
  assert(((meth == NULL) || !meth->is_static()), "find_instance_method should have skipped statics");
  return meth;
}

// find_instance_method looks up the name/signature in the local methods array
// and skips over static methods
Method* InstanceKlass::find_instance_method(Symbol* name, Symbol* signature) {
  return InstanceKlass::find_instance_method(methods(), name, signature);
}

// find_method looks up the name/signature in the local methods array
Method* InstanceKlass::find_method(
    Array<Method*>* methods, Symbol* name, Symbol* signature) {
  return InstanceKlass::find_method_impl(methods, name, signature, find_overpass, find_static);
}

Method* InstanceKlass::find_method_impl(
    Array<Method*>* methods, Symbol* name, Symbol* signature, OverpassLookupMode overpass_mode, StaticLookupMode static_mode) {
  int hit = find_method_index(methods, name, signature, overpass_mode, static_mode);
  return hit >= 0 ?
methods->at(hit): NULL; 1454 } 1455 1456 bool InstanceKlass::method_matches(Method* m, Symbol* signature, bool skipping_overpass, bool skipping_static) { 1457 return (m->signature() == signature) && 1458 (!skipping_overpass || !m->is_overpass()) && 1459 (!skipping_static || !m->is_static()); 1460 } 1461 1462 // Used directly for default_methods to find the index into the 1463 // default_vtable_indices, and indirectly by find_method 1464 // find_method_index looks in the local methods array to return the index 1465 // of the matching name/signature. If, overpass methods are being ignored, 1466 // the search continues to find a potential non-overpass match. This capability 1467 // is important during method resolution to prefer a static method, for example, 1468 // over an overpass method. 1469 int InstanceKlass::find_method_index( 1470 Array<Method*>* methods, Symbol* name, Symbol* signature, OverpassLookupMode overpass_mode, StaticLookupMode static_mode) { 1471 bool skipping_overpass = (overpass_mode == skip_overpass); 1472 bool skipping_static = (static_mode == skip_static); 1473 int hit = binary_search(methods, name); 1474 if (hit != -1) { 1475 Method* m = methods->at(hit); 1476 1477 // Do linear search to find matching signature. First, quick check 1478 // for common case, ignoring overpasses if requested. 
1479 if (method_matches(m, signature, skipping_overpass, skipping_static)) return hit; 1480 1481 // search downwards through overloaded methods 1482 int i; 1483 for (i = hit - 1; i >= 0; --i) { 1484 Method* m = methods->at(i); 1485 assert(m->is_method(), "must be method"); 1486 if (m->name() != name) break; 1487 if (method_matches(m, signature, skipping_overpass, skipping_static)) return i; 1488 } 1489 // search upwards 1490 for (i = hit + 1; i < methods->length(); ++i) { 1491 Method* m = methods->at(i); 1492 assert(m->is_method(), "must be method"); 1493 if (m->name() != name) break; 1494 if (method_matches(m, signature, skipping_overpass, skipping_static)) return i; 1495 } 1496 // not found 1497 #ifdef ASSERT 1498 int index = (skipping_overpass || skipping_static) ? -1 : linear_search(methods, name, signature); 1499 assert(index == -1, err_msg("binary search should have found entry %d", index)); 1500 #endif 1501 } 1502 return -1; 1503 } 1504 int InstanceKlass::find_method_by_name(Symbol* name, int* end) { 1505 return find_method_by_name(methods(), name, end); 1506 } 1507 1508 int InstanceKlass::find_method_by_name( 1509 Array<Method*>* methods, Symbol* name, int* end_ptr) { 1510 assert(end_ptr != NULL, "just checking"); 1511 int start = binary_search(methods, name); 1512 int end = start + 1; 1513 if (start != -1) { 1514 while (start - 1 >= 0 && (methods->at(start - 1))->name() == name) --start; 1515 while (end < methods->length() && (methods->at(end))->name() == name) ++end; 1516 *end_ptr = end; 1517 return start; 1518 } 1519 return -1; 1520 } 1521 1522 // uncached_lookup_method searches both the local class methods array and all 1523 // superclasses methods arrays, skipping any overpass methods in superclasses. 
// Walks this class and its superclass chain looking for name/signature.
// The caller's overpass mode applies only to the local class; overpasses in
// superclasses are always skipped.
Method* InstanceKlass::uncached_lookup_method(Symbol* name, Symbol* signature, OverpassLookupMode overpass_mode) const {
  OverpassLookupMode overpass_local_mode = overpass_mode;
  Klass* klass = const_cast<InstanceKlass*>(this);
  while (klass != NULL) {
    Method* method = InstanceKlass::cast(klass)->find_method_impl(name, signature, overpass_local_mode, find_static);
    if (method != NULL) {
      return method;
    }
    klass = InstanceKlass::cast(klass)->super();
    overpass_local_mode = skip_overpass;   // Always ignore overpass methods in superclasses
  }
  return NULL;
}

#ifdef ASSERT
// search through class hierarchy and return true if this class or
// one of the superclasses was redefined
bool InstanceKlass::has_redefined_this_or_super() const {
  const InstanceKlass* klass = this;
  while (klass != NULL) {
    if (klass->has_been_redefined()) {
      return true;
    }
    klass = InstanceKlass::cast(klass->super());
  }
  return false;
}
#endif

// lookup a method in the default methods list then in all transitive interfaces
// Do NOT return private or static methods
Method* InstanceKlass::lookup_method_in_ordered_interfaces(Symbol* name,
                                                         Symbol* signature) const {
  Method* m = NULL;
  if (default_methods() != NULL) {
    m = find_method(default_methods(), name, signature);
  }
  // Look up interfaces
  if (m == NULL) {
    m = lookup_method_in_all_interfaces(name, signature, find_defaults);
  }
  return m;
}

// lookup a method in all the interfaces that this class implements
// Do NOT return private or static methods, new in JDK8 which are not externally visible
// They should only be found in the initial InterfaceMethodRef
Method* InstanceKlass::lookup_method_in_all_interfaces(Symbol* name,
                                                       Symbol* signature,
                                                       DefaultsLookupMode defaults_mode) const {
  Array<Klass*>* all_ifs = transitive_interfaces();
  int num_ifs = all_ifs->length();
  InstanceKlass *ik = NULL;
  for (int i = 0; i < num_ifs; i++) {
    ik = InstanceKlass::cast(all_ifs->at(i));
    Method* m = ik->lookup_method(name, signature);
    // Only public, non-static methods qualify; default methods are also
    // filtered out when defaults_mode == skip_defaults.
    if (m != NULL && m->is_public() && !m->is_static() &&
        ((defaults_mode != skip_defaults) || !m->is_default_method())) {
      return m;
    }
  }
  return NULL;
}

/* jni_id_for_impl for jfieldIds only */
// Slow path: allocates a new JNIid for the offset under JfieldIdCreation_lock,
// re-checking the list first in case another thread won the race.
JNIid* InstanceKlass::jni_id_for_impl(instanceKlassHandle this_k, int offset) {
  MutexLocker ml(JfieldIdCreation_lock);
  // Retry lookup after we got the lock
  JNIid* probe = this_k->jni_ids() == NULL ? NULL : this_k->jni_ids()->find(offset);
  if (probe == NULL) {
    // Slow case, allocate new static field identifier
    probe = new JNIid(this_k(), offset, this_k->jni_ids());
    this_k->set_jni_ids(probe);
  }
  return probe;
}


/* jni_id_for for jfieldIds only */
JNIid* InstanceKlass::jni_id_for(int offset) {
  // Fast unlocked lookup; fall back to the locked slow path on a miss.
  JNIid* probe = jni_ids() == NULL ?
NULL : jni_ids()->find(offset); 1605 if (probe == NULL) { 1606 probe = jni_id_for_impl(this, offset); 1607 } 1608 return probe; 1609 } 1610 1611 u2 InstanceKlass::enclosing_method_data(int offset) { 1612 Array<jushort>* inner_class_list = inner_classes(); 1613 if (inner_class_list == NULL) { 1614 return 0; 1615 } 1616 int length = inner_class_list->length(); 1617 if (length % inner_class_next_offset == 0) { 1618 return 0; 1619 } else { 1620 int index = length - enclosing_method_attribute_size; 1621 assert(offset < enclosing_method_attribute_size, "invalid offset"); 1622 return inner_class_list->at(index + offset); 1623 } 1624 } 1625 1626 void InstanceKlass::set_enclosing_method_indices(u2 class_index, 1627 u2 method_index) { 1628 Array<jushort>* inner_class_list = inner_classes(); 1629 assert (inner_class_list != NULL, "_inner_classes list is not set up"); 1630 int length = inner_class_list->length(); 1631 if (length % inner_class_next_offset == enclosing_method_attribute_size) { 1632 int index = length - enclosing_method_attribute_size; 1633 inner_class_list->at_put( 1634 index + enclosing_method_class_index_offset, class_index); 1635 inner_class_list->at_put( 1636 index + enclosing_method_method_index_offset, method_index); 1637 } 1638 } 1639 1640 // Lookup or create a jmethodID. 1641 // This code is called by the VMThread and JavaThreads so the 1642 // locking has to be done very carefully to avoid deadlocks 1643 // and/or other cache consistency problems. 1644 // 1645 jmethodID InstanceKlass::get_jmethod_id(instanceKlassHandle ik_h, methodHandle method_h) { 1646 size_t idnum = (size_t)method_h->method_idnum(); 1647 jmethodID* jmeths = ik_h->methods_jmethod_ids_acquire(); 1648 size_t length = 0; 1649 jmethodID id = NULL; 1650 1651 // We use a double-check locking idiom here because this cache is 1652 // performance sensitive. 
In the normal system, this cache only 1653 // transitions from NULL to non-NULL which is safe because we use 1654 // release_set_methods_jmethod_ids() to advertise the new cache. 1655 // A partially constructed cache should never be seen by a racing 1656 // thread. We also use release_store_ptr() to save a new jmethodID 1657 // in the cache so a partially constructed jmethodID should never be 1658 // seen either. Cache reads of existing jmethodIDs proceed without a 1659 // lock, but cache writes of a new jmethodID requires uniqueness and 1660 // creation of the cache itself requires no leaks so a lock is 1661 // generally acquired in those two cases. 1662 // 1663 // If the RedefineClasses() API has been used, then this cache can 1664 // grow and we'll have transitions from non-NULL to bigger non-NULL. 1665 // Cache creation requires no leaks and we require safety between all 1666 // cache accesses and freeing of the old cache so a lock is generally 1667 // acquired when the RedefineClasses() API has been used. 
1668 1669 if (jmeths != NULL) { 1670 // the cache already exists 1671 if (!ik_h->idnum_can_increment()) { 1672 // the cache can't grow so we can just get the current values 1673 get_jmethod_id_length_value(jmeths, idnum, &length, &id); 1674 } else { 1675 // cache can grow so we have to be more careful 1676 if (Threads::number_of_threads() == 0 || 1677 SafepointSynchronize::is_at_safepoint()) { 1678 // we're single threaded or at a safepoint - no locking needed 1679 get_jmethod_id_length_value(jmeths, idnum, &length, &id); 1680 } else { 1681 MutexLocker ml(JmethodIdCreation_lock); 1682 get_jmethod_id_length_value(jmeths, idnum, &length, &id); 1683 } 1684 } 1685 } 1686 // implied else: 1687 // we need to allocate a cache so default length and id values are good 1688 1689 if (jmeths == NULL || // no cache yet 1690 length <= idnum || // cache is too short 1691 id == NULL) { // cache doesn't contain entry 1692 1693 // This function can be called by the VMThread so we have to do all 1694 // things that might block on a safepoint before grabbing the lock. 1695 // Otherwise, we can deadlock with the VMThread or have a cache 1696 // consistency issue. These vars keep track of what we might have 1697 // to free after the lock is dropped. 
1698 jmethodID to_dealloc_id = NULL; 1699 jmethodID* to_dealloc_jmeths = NULL; 1700 1701 // may not allocate new_jmeths or use it if we allocate it 1702 jmethodID* new_jmeths = NULL; 1703 if (length <= idnum) { 1704 // allocate a new cache that might be used 1705 size_t size = MAX2(idnum+1, (size_t)ik_h->idnum_allocated_count()); 1706 new_jmeths = NEW_C_HEAP_ARRAY(jmethodID, size+1, mtClass); 1707 memset(new_jmeths, 0, (size+1)*sizeof(jmethodID)); 1708 // cache size is stored in element[0], other elements offset by one 1709 new_jmeths[0] = (jmethodID)size; 1710 } 1711 1712 // allocate a new jmethodID that might be used 1713 jmethodID new_id = NULL; 1714 if (method_h->is_old() && !method_h->is_obsolete()) { 1715 // The method passed in is old (but not obsolete), we need to use the current version 1716 Method* current_method = ik_h->method_with_idnum((int)idnum); 1717 assert(current_method != NULL, "old and but not obsolete, so should exist"); 1718 new_id = Method::make_jmethod_id(ik_h->class_loader_data(), current_method); 1719 } else { 1720 // It is the current version of the method or an obsolete method, 1721 // use the version passed in 1722 new_id = Method::make_jmethod_id(ik_h->class_loader_data(), method_h()); 1723 } 1724 1725 if (Threads::number_of_threads() == 0 || 1726 SafepointSynchronize::is_at_safepoint()) { 1727 // we're single threaded or at a safepoint - no locking needed 1728 id = get_jmethod_id_fetch_or_update(ik_h, idnum, new_id, new_jmeths, 1729 &to_dealloc_id, &to_dealloc_jmeths); 1730 } else { 1731 MutexLocker ml(JmethodIdCreation_lock); 1732 id = get_jmethod_id_fetch_or_update(ik_h, idnum, new_id, new_jmeths, 1733 &to_dealloc_id, &to_dealloc_jmeths); 1734 } 1735 1736 // The lock has been dropped so we can free resources. 1737 // Free up either the old cache or the new cache if we allocated one. 
    if (to_dealloc_jmeths != NULL) {
      FreeHeap(to_dealloc_jmeths);
    }
    // free up the new ID since it wasn't needed
    // (another thread installed an id for this idnum first)
    if (to_dealloc_id != NULL) {
      Method::destroy_jmethod_id(ik_h->class_loader_data(), to_dealloc_id);
    }
  }
  return id;
}

// Figure out how many jmethodIDs haven't been allocated, and make
// sure space for them is pre-allocated. This makes getting all
// method ids much, much faster with classes with more than 8
// methods, and has a *substantial* effect on performance with jvmti
// code that loads all jmethodIDs for all classes.
//
// start_offset is the index of the first method in methods() to examine;
// methods before it are assumed to have been handled already.
void InstanceKlass::ensure_space_for_methodids(int start_offset) {
  int new_jmeths = 0;
  int length = methods()->length();
  // Count the methods that do not yet have a jmethodID.
  for (int index = start_offset; index < length; index++) {
    Method* m = methods()->at(index);
    jmethodID id = m->find_jmethod_id_or_null();
    if (id == NULL) {
      new_jmeths++;
    }
  }
  if (new_jmeths != 0) {
    Method::ensure_jmethod_ids(class_loader_data(), new_jmeths);
  }
}

// Common code to fetch the jmethodID from the cache or update the
// cache with the new jmethodID. This function should never do anything
// that causes the caller to go to a safepoint or we can deadlock with
// the VMThread or have cache consistency issues.
//
// ik_h                - klass whose jmethodID cache is being updated
// idnum               - method idnum used as the cache index
// new_id              - speculatively pre-allocated jmethodID (must be non-NULL)
// new_jmeths          - speculatively pre-allocated replacement cache (may be NULL)
// to_dealloc_id_p     - out: set to new_id if it lost the race and must be freed
// to_dealloc_jmeths_p - out: set to whichever cache (old or new) is now unused
//
// The caller frees *to_dealloc_id_p / *to_dealloc_jmeths_p after dropping
// JmethodIdCreation_lock, since freeing may not be safe while it is held.
jmethodID InstanceKlass::get_jmethod_id_fetch_or_update(
            instanceKlassHandle ik_h, size_t idnum, jmethodID new_id,
            jmethodID* new_jmeths, jmethodID* to_dealloc_id_p,
            jmethodID** to_dealloc_jmeths_p) {
  assert(new_id != NULL, "sanity check");
  assert(to_dealloc_id_p != NULL, "sanity check");
  assert(to_dealloc_jmeths_p != NULL, "sanity check");
  // Caller must guarantee mutual exclusion: single-threaded startup,
  // a safepoint, or ownership of JmethodIdCreation_lock.
  assert(Threads::number_of_threads() == 0 ||
         SafepointSynchronize::is_at_safepoint() ||
         JmethodIdCreation_lock->owned_by_self(), "sanity check");

  // reacquire the cache - we are locked, single threaded or at a safepoint
  jmethodID* jmeths = ik_h->methods_jmethod_ids_acquire();
  jmethodID  id     = NULL;
  size_t     length = 0;

  if (jmeths == NULL ||                         // no cache yet
      (length = (size_t)jmeths[0]) <= idnum) {  // cache is too short
    if (jmeths != NULL) {
      // copy any existing entries from the old cache
      // (entries are offset by one; element[0] holds the length)
      for (size_t index = 0; index < length; index++) {
        new_jmeths[index+1] = jmeths[index+1];
      }
      *to_dealloc_jmeths_p = jmeths;  // save old cache for later delete
    }
    // Publish the new cache with a release store so unlocked readers
    // never observe it before its contents are complete.
    ik_h->release_set_methods_jmethod_ids(jmeths = new_jmeths);
  } else {
    // fetch jmethodID (if any) from the existing cache
    id = jmeths[idnum+1];
    *to_dealloc_jmeths_p = new_jmeths;  // save new cache for later delete
  }
  if (id == NULL) {
    // No matching jmethodID in the existing cache or we have a new
    // cache or we just grew the cache. This cache write is done here
    // by the first thread to win the foot race because a jmethodID
    // needs to be unique once it is generally available.
    id = new_id;

    // The jmethodID cache can be read while unlocked so we have to
    // make sure the new jmethodID is complete before installing it
    // in the cache.
    OrderAccess::release_store_ptr(&jmeths[idnum+1], id);
  } else {
    *to_dealloc_id_p = new_id; // save new id for later delete
  }
  return id;
}


// Common code to get the jmethodID cache length and the jmethodID
// value at index idnum if there is one.
//
// cache    - the jmethodID cache (must be non-NULL)
// idnum    - index to probe
// length_p - out: cache length (element[0])
// id_p     - out: cached id, or NULL if the cache is too short
void InstanceKlass::get_jmethod_id_length_value(jmethodID* cache,
       size_t idnum, size_t *length_p, jmethodID* id_p) {
  assert(cache != NULL, "sanity check");
  assert(length_p != NULL, "sanity check");
  assert(id_p != NULL, "sanity check");

  // cache size is stored in element[0], other elements offset by one
  *length_p = (size_t)cache[0];
  if (*length_p <= idnum) {  // cache is too short
    *id_p = NULL;
  } else {
    *id_p = cache[idnum+1];  // fetch jmethodID (if any)
  }
}


// Lookup a jmethodID, NULL if not found.  Do no blocking, no allocations, no handles
jmethodID InstanceKlass::jmethod_id_or_null(Method* method) {
  size_t idnum = (size_t)method->method_idnum();
  // acquire-load pairs with the release store in get_jmethod_id_fetch_or_update
  jmethodID* jmeths = methods_jmethod_ids_acquire();
  size_t length;                                // length assigned as debugging crumb
  jmethodID id = NULL;
  if (jmeths != NULL &&                         // If there is a cache
      (length = (size_t)jmeths[0]) > idnum) {   // and if it is long enough,
    id = jmeths[idnum+1];                       // Look up the id (may be NULL)
  }
  return id;
}

// Atomically decrement this bucket's reference count; returns the new value.
int nmethodBucket::decrement() {
  return Atomic::add(-1, (volatile int *)&_count);
}

//
// Walk the list of dependent nmethods searching for nmethods which
// are dependent on the changes that were passed in and mark them for
// deoptimization.  Returns the number of nmethods found.
1863 // 1864 int InstanceKlass::mark_dependent_nmethods(DepChange& changes) { 1865 assert_locked_or_safepoint(CodeCache_lock); 1866 int found = 0; 1867 nmethodBucket* b = _dependencies; 1868 while (b != NULL) { 1869 nmethod* nm = b->get_nmethod(); 1870 // since dependencies aren't removed until an nmethod becomes a zombie, 1871 // the dependency list may contain nmethods which aren't alive. 1872 if (b->count() > 0 && nm->is_alive() && !nm->is_marked_for_deoptimization() && nm->check_dependency_on(changes)) { 1873 if (TraceDependencies) { 1874 ResourceMark rm; 1875 tty->print_cr("Marked for deoptimization"); 1876 tty->print_cr(" context = %s", this->external_name()); 1877 changes.print(); 1878 nm->print(); 1879 nm->print_dependencies(); 1880 } 1881 nm->mark_for_deoptimization(); 1882 found++; 1883 } 1884 b = b->next(); 1885 } 1886 return found; 1887 } 1888 1889 void InstanceKlass::clean_dependent_nmethods() { 1890 assert_locked_or_safepoint(CodeCache_lock); 1891 1892 if (has_unloaded_dependent()) { 1893 nmethodBucket* b = _dependencies; 1894 nmethodBucket* last = NULL; 1895 while (b != NULL) { 1896 assert(b->count() >= 0, err_msg("bucket count: %d", b->count())); 1897 1898 nmethodBucket* next = b->next(); 1899 1900 if (b->count() == 0) { 1901 if (last == NULL) { 1902 _dependencies = next; 1903 } else { 1904 last->set_next(next); 1905 } 1906 delete b; 1907 // last stays the same. 1908 } else { 1909 last = b; 1910 } 1911 1912 b = next; 1913 } 1914 set_has_unloaded_dependent(false); 1915 } 1916 #ifdef ASSERT 1917 else { 1918 // Verification 1919 for (nmethodBucket* b = _dependencies; b != NULL; b = b->next()) { 1920 assert(b->count() >= 0, err_msg("bucket count: %d", b->count())); 1921 assert(b->count() != 0, "empty buckets need to be cleaned"); 1922 } 1923 } 1924 #endif 1925 } 1926 1927 // 1928 // Add an nmethodBucket to the list of dependencies for this nmethod. 
1929 // It's possible that an nmethod has multiple dependencies on this klass 1930 // so a count is kept for each bucket to guarantee that creation and 1931 // deletion of dependencies is consistent. 1932 // 1933 void InstanceKlass::add_dependent_nmethod(nmethod* nm) { 1934 assert_locked_or_safepoint(CodeCache_lock); 1935 nmethodBucket* b = _dependencies; 1936 nmethodBucket* last = NULL; 1937 while (b != NULL) { 1938 if (nm == b->get_nmethod()) { 1939 b->increment(); 1940 return; 1941 } 1942 b = b->next(); 1943 } 1944 _dependencies = new nmethodBucket(nm, _dependencies); 1945 } 1946 1947 1948 // 1949 // Decrement count of the nmethod in the dependency list and remove 1950 // the bucket competely when the count goes to 0. This method must 1951 // find a corresponding bucket otherwise there's a bug in the 1952 // recording of dependecies. 1953 // 1954 void InstanceKlass::remove_dependent_nmethod(nmethod* nm) { 1955 assert_locked_or_safepoint(CodeCache_lock); 1956 nmethodBucket* b = _dependencies; 1957 nmethodBucket* last = NULL; 1958 while (b != NULL) { 1959 if (nm == b->get_nmethod()) { 1960 int val = b->decrement(); 1961 guarantee(val >= 0, err_msg("Underflow: %d", val)); 1962 if (val == 0) { 1963 set_has_unloaded_dependent(true); 1964 } 1965 return; 1966 } 1967 last = b; 1968 b = b->next(); 1969 } 1970 #ifdef ASSERT 1971 tty->print_cr("### %s can't find dependent nmethod:", this->external_name()); 1972 nm->print(); 1973 #endif // ASSERT 1974 ShouldNotReachHere(); 1975 } 1976 1977 1978 #ifndef PRODUCT 1979 void InstanceKlass::print_dependent_nmethods(bool verbose) { 1980 nmethodBucket* b = _dependencies; 1981 int idx = 0; 1982 while (b != NULL) { 1983 nmethod* nm = b->get_nmethod(); 1984 tty->print("[%d] count=%d { ", idx++, b->count()); 1985 if (!verbose) { 1986 nm->print_on(tty, "nmethod"); 1987 tty->print_cr(" } "); 1988 } else { 1989 nm->print(); 1990 nm->print_dependencies(); 1991 tty->print_cr("--- } "); 1992 } 1993 b = b->next(); 1994 } 1995 } 1996 1997 1998 
bool InstanceKlass::is_dependent_nmethod(nmethod* nm) { 1999 nmethodBucket* b = _dependencies; 2000 while (b != NULL) { 2001 if (nm == b->get_nmethod()) { 2002 #ifdef ASSERT 2003 int count = b->count(); 2004 assert(count >= 0, err_msg("count shouldn't be negative: %d", count)); 2005 #endif 2006 return true; 2007 } 2008 b = b->next(); 2009 } 2010 return false; 2011 } 2012 #endif //PRODUCT 2013 2014 2015 // Garbage collection 2016 2017 #ifdef ASSERT 2018 template <class T> void assert_is_in(T *p) { 2019 T heap_oop = oopDesc::load_heap_oop(p); 2020 if (!oopDesc::is_null(heap_oop)) { 2021 oop o = oopDesc::decode_heap_oop_not_null(heap_oop); 2022 assert(Universe::heap()->is_in(o), "should be in heap"); 2023 } 2024 } 2025 template <class T> void assert_is_in_closed_subset(T *p) { 2026 T heap_oop = oopDesc::load_heap_oop(p); 2027 if (!oopDesc::is_null(heap_oop)) { 2028 oop o = oopDesc::decode_heap_oop_not_null(heap_oop); 2029 assert(Universe::heap()->is_in_closed_subset(o), 2030 err_msg("should be in closed *p " INTPTR_FORMAT " " INTPTR_FORMAT, (address)p, (address)o)); 2031 } 2032 } 2033 template <class T> void assert_is_in_reserved(T *p) { 2034 T heap_oop = oopDesc::load_heap_oop(p); 2035 if (!oopDesc::is_null(heap_oop)) { 2036 oop o = oopDesc::decode_heap_oop_not_null(heap_oop); 2037 assert(Universe::heap()->is_in_reserved(o), "should be in reserved"); 2038 } 2039 } 2040 template <class T> void assert_nothing(T *p) {} 2041 2042 #else 2043 template <class T> void assert_is_in(T *p) {} 2044 template <class T> void assert_is_in_closed_subset(T *p) {} 2045 template <class T> void assert_is_in_reserved(T *p) {} 2046 template <class T> void assert_nothing(T *p) {} 2047 #endif // ASSERT 2048 2049 // 2050 // Macros that iterate over areas of oops which are specialized on type of 2051 // oop pointer either narrow or wide, depending on UseCompressedOops 2052 // 2053 // Parameters are: 2054 // T - type of oop to point to (either oop or narrowOop) 2055 // start_p - starting 
//             pointer for region to iterate over
//   count - number of oops or narrowOops to iterate over
//   do_oop - action to perform on each oop (it's arbitrary C code which
//            makes it more efficient to put in a macro rather than making
//            it a template function)
//   assert_fn - assert function which is template function because performance
//               doesn't matter when enabled.

// Forward iteration over a contiguous region of (narrow)oops.
#define InstanceKlass_SPECIALIZED_OOP_ITERATE( \
  T, start_p, count, do_oop,                \
  assert_fn)                                \
{                                           \
  T* p         = (T*)(start_p);             \
  T* const end = p + (count);               \
  while (p < end) {                         \
    (assert_fn)(p);                         \
    do_oop;                                 \
    ++p;                                    \
  }                                         \
}

// Same as above but visits the region back-to-front.
#define InstanceKlass_SPECIALIZED_OOP_REVERSE_ITERATE( \
  T, start_p, count, do_oop,                \
  assert_fn)                                \
{                                           \
  T* const start = (T*)(start_p);           \
  T*       p     = start + (count);         \
  while (start < p) {                       \
    --p;                                    \
    (assert_fn)(p);                         \
    do_oop;                                 \
  }                                         \
}

// Forward iteration clipped to the [low, high) address range.
#define InstanceKlass_SPECIALIZED_BOUNDED_OOP_ITERATE( \
  T, start_p, count, low, high,             \
  do_oop, assert_fn)                        \
{                                           \
  T* const l = (T*)(low);                   \
  T* const h = (T*)(high);                  \
  assert(mask_bits((intptr_t)l, sizeof(T)-1) == 0 &&   \
         mask_bits((intptr_t)h, sizeof(T)-1) == 0,     \
         "bounded region must be properly aligned");   \
  T* p   = (T*)(start_p);                   \
  T* end = p + (count);                     \
  if (p < l)   p   = l;                     \
  if (end > h) end = h;                     \
  while (p < end) {                         \
    (assert_fn)(p);                         \
    do_oop;                                 \
    ++p;                                    \
  }                                         \
}


// The following macros call specialized macros, passing either oop or
// narrowOop as the specialization type.  These test the UseCompressedOops
// flag.
#define InstanceKlass_OOP_MAP_ITERATE(obj, do_oop, assert_fn)            \
{                                                                        \
  /* Compute oopmap block range. The common case                         \
     is nonstatic_oop_map_size == 1. */                                  \
  OopMapBlock* map           = start_of_nonstatic_oop_maps();            \
  OopMapBlock* const end_map = map + nonstatic_oop_map_count();          \
  if (UseCompressedOops) {                                               \
    while (map < end_map) {                                              \
      InstanceKlass_SPECIALIZED_OOP_ITERATE(narrowOop,                   \
        obj->obj_field_addr<narrowOop>(map->offset()), map->count(),     \
        do_oop, assert_fn)                                               \
      ++map;                                                             \
    }                                                                    \
  } else {                                                               \
    while (map < end_map) {                                              \
      InstanceKlass_SPECIALIZED_OOP_ITERATE(oop,                         \
        obj->obj_field_addr<oop>(map->offset()), map->count(),           \
        do_oop, assert_fn)                                               \
      ++map;                                                             \
    }                                                                    \
  }                                                                      \
}

// Visits the oop-map blocks, and the oops within each block, in reverse order.
#define InstanceKlass_OOP_MAP_REVERSE_ITERATE(obj, do_oop, assert_fn)    \
{                                                                        \
  OopMapBlock* const start_map = start_of_nonstatic_oop_maps();          \
  OopMapBlock* map             = start_map + nonstatic_oop_map_count();  \
  if (UseCompressedOops) {                                               \
    while (start_map < map) {                                            \
      --map;                                                             \
      InstanceKlass_SPECIALIZED_OOP_REVERSE_ITERATE(narrowOop,           \
        obj->obj_field_addr<narrowOop>(map->offset()), map->count(),     \
        do_oop, assert_fn)                                               \
    }                                                                    \
  } else {                                                               \
    while (start_map < map) {                                            \
      --map;                                                             \
      InstanceKlass_SPECIALIZED_OOP_REVERSE_ITERATE(oop,                 \
        obj->obj_field_addr<oop>(map->offset()), map->count(),           \
        do_oop, assert_fn)                                               \
    }                                                                    \
  }                                                                      \
}

// Forward iteration restricted to oops lying inside [low, high).
#define InstanceKlass_BOUNDED_OOP_MAP_ITERATE(obj, low, high, do_oop,    \
                                              assert_fn)                 \
{                                                                        \
  /* Compute oopmap block range. The common case is                      \
     nonstatic_oop_map_size == 1, so we accept the                       \
     usually non-existent extra overhead of examining                    \
     all the maps. */                                                    \
  OopMapBlock* map           = start_of_nonstatic_oop_maps();            \
  OopMapBlock* const end_map = map + nonstatic_oop_map_count();          \
  if (UseCompressedOops) {                                               \
    while (map < end_map) {                                              \
      InstanceKlass_SPECIALIZED_BOUNDED_OOP_ITERATE(narrowOop,           \
        obj->obj_field_addr<narrowOop>(map->offset()), map->count(),     \
        low, high,                                                       \
        do_oop, assert_fn)                                               \
      ++map;                                                             \
    }                                                                    \
  } else {                                                               \
    while (map < end_map) {                                              \
      InstanceKlass_SPECIALIZED_BOUNDED_OOP_ITERATE(oop,                 \
        obj->obj_field_addr<oop>(map->offset()), map->count(),           \
        low, high,                                                       \
        do_oop, assert_fn)                                               \
      ++map;                                                             \
    }                                                                    \
  }                                                                      \
}

// MarkSweep marking: follow the klass, then mark-and-push every oop field.
void InstanceKlass::oop_follow_contents(oop obj) {
  assert(obj != NULL, "can't follow the content of NULL object");
  MarkSweep::follow_klass(obj->klass());
  InstanceKlass_OOP_MAP_ITERATE( \
    obj, \
    MarkSweep::mark_and_push(p), \
    assert_is_in_closed_subset)
}

#if INCLUDE_ALL_GCS
// Parallel-compact marking variant of the above.
void InstanceKlass::oop_follow_contents(ParCompactionManager* cm,
                                        oop obj) {
  assert(obj != NULL, "can't follow the content of NULL object");
  PSParallelCompact::follow_klass(cm, obj->klass());
  // Only mark the header and let the scan of the meta-data mark
  // everything else.
  InstanceKlass_OOP_MAP_ITERATE( \
    obj, \
    PSParallelCompact::mark_and_push(cm, p), \
    assert_is_in)
}
#endif // INCLUDE_ALL_GCS

// closure's do_metadata() method dictates whether the given closure should be
// applied to the klass ptr in the object header.
// Generates one oop_oop_iterate<nv_suffix> method per closure type when
// instantiated by the ALL_OOP_OOP_ITERATE_CLOSURES_* macros below: applies
// the closure to the klass pointer (if the closure asks for metadata) and
// to every oop field, then returns the object size.
#define InstanceKlass_OOP_OOP_ITERATE_DEFN(OopClosureType, nv_suffix)        \
                                                                             \
int InstanceKlass::oop_oop_iterate##nv_suffix(oop obj, OopClosureType* closure) { \
  /* header */                                                               \
  if_do_metadata_checked(closure, nv_suffix) {                               \
    closure->do_klass##nv_suffix(obj->klass());                              \
  }                                                                          \
  InstanceKlass_OOP_MAP_ITERATE(                                             \
    obj,                                                                     \
    (closure)->do_oop##nv_suffix(p),                                         \
    assert_is_in_closed_subset)                                              \
  return size_helper();                                                      \
}

#if INCLUDE_ALL_GCS
// Reverse-order variant; the closure must not request metadata
// (enforced by assert_should_ignore_metadata).
#define InstanceKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN(OopClosureType, nv_suffix) \
                                                                                \
int InstanceKlass::oop_oop_iterate_backwards##nv_suffix(oop obj,                \
                                             OopClosureType* closure) {        \
  assert_should_ignore_metadata(closure, nv_suffix);                           \
                                                                                \
  /* instance variables */                                                     \
  InstanceKlass_OOP_MAP_REVERSE_ITERATE(                                       \
    obj,                                                                       \
    (closure)->do_oop##nv_suffix(p),                                           \
    assert_is_in_closed_subset)                                                \
  return size_helper();                                                        \
}
#endif // INCLUDE_ALL_GCS

// MemRegion-bounded variant: only fields (and the header, if the object
// itself lies in mr) that fall within mr are visited.
#define InstanceKlass_OOP_OOP_ITERATE_DEFN_m(OopClosureType, nv_suffix)      \
                                                                             \
int InstanceKlass::oop_oop_iterate##nv_suffix##_m(oop obj,                   \
                                                  OopClosureType* closure,   \
                                                  MemRegion mr) {            \
  if_do_metadata_checked(closure, nv_suffix) {                               \
    if (mr.contains(obj)) {                                                  \
      closure->do_klass##nv_suffix(obj->klass());                            \
    }                                                                        \
  }                                                                          \
  InstanceKlass_BOUNDED_OOP_MAP_ITERATE(                                     \
    obj, mr.start(), mr.end(),                                               \
    (closure)->do_oop##nv_suffix(p),                                         \
    assert_is_in_closed_subset)                                              \
  return size_helper();                                                      \
}

// Instantiate the iterate methods for every closure type.
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceKlass_OOP_OOP_ITERATE_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceKlass_OOP_OOP_ITERATE_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceKlass_OOP_OOP_ITERATE_DEFN_m)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceKlass_OOP_OOP_ITERATE_DEFN_m)
#if INCLUDE_ALL_GCS
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
#endif // INCLUDE_ALL_GCS

// MarkSweep adjust phase: adjust every oop field of obj; returns the
// object size (size_helper()).
int InstanceKlass::oop_adjust_pointers(oop obj) {
  int size = size_helper();
  InstanceKlass_OOP_MAP_ITERATE( \
    obj, \
    MarkSweep::adjust_pointer(p), \
    assert_is_in)
  return size;
}

#if INCLUDE_ALL_GCS
// Parallel scavenge: push (claim or forward) every field that should be
// scavenged, visiting fields in reverse oop-map order.
void InstanceKlass::oop_push_contents(PSPromotionManager* pm, oop obj) {
  InstanceKlass_OOP_MAP_REVERSE_ITERATE( \
    obj, \
    if (PSScavenge::should_scavenge(p)) { \
      pm->claim_or_forward_depth(p); \
    }, \
    assert_nothing )
}

// Parallel compact: adjust every oop field; returns the object size.
int InstanceKlass::oop_update_pointers(ParCompactionManager* cm, oop obj) {
  int size = size_helper();
  InstanceKlass_OOP_MAP_ITERATE( \
    obj, \
    PSParallelCompact::adjust_pointer(p), \
    assert_is_in)
  return size;
}

#endif // INCLUDE_ALL_GCS

// Clears the cached implementor of an interface when the implementor's
// class loader is no longer alive.
void InstanceKlass::clean_implementors_list(BoolObjectClosure* is_alive) {
  assert(class_loader_data()->is_alive(is_alive), "this klass should be live");
  if (is_interface()) {
    if (ClassUnloading) {
      Klass* impl = implementor();
      if (impl != NULL) {
        if (!impl->is_loader_alive(is_alive)) {
          // remove this guy
          Klass** klass = adr_implementor();
          assert(klass != NULL, "null klass");
          // NOTE(review): the assert compiles away in product builds, so
          // this NULL check still guards product; it is redundant only in
          // debug builds.
          if (klass != NULL) {
            *klass = NULL;
          }
        }
      }
    }
  }
}

// Cleans the MethodData of every method in this klass.
void InstanceKlass::clean_method_data(BoolObjectClosure* is_alive) {
  for (int m = 0; m < methods()->length(); m++) {
    MethodData* mdo = methods()->at(m)->method_data();
    if (mdo != NULL) {
      mdo->clean_method_data(is_alive);
    }
  }
}


// Helper passed to array_klasses_do() below.
static void remove_unshareable_in_class(Klass* k) {
  // remove klass's unshareable info
  k->remove_unshareable_info();
}

// CDS dump support: strip this klass (and its array classes) of state
// that cannot be written into the shared archive.
void InstanceKlass::remove_unshareable_info() {
  Klass::remove_unshareable_info();
  // Unlink the class
  if (is_linked()) {
    unlink_class();
  }
  init_implementor();

  constants()->remove_unshareable_info();

  for (int i = 0; i < methods()->length(); i++) {
    Method* m = methods()->at(i);
    m->remove_unshareable_info();
  }

  // do array classes also.
  array_klasses_do(remove_unshareable_in_class);
}

// Helper passed to array_klasses_do() below.
static void restore_unshareable_in_class(Klass* k, TRAPS) {
  // Array classes have null protection domain.
  // --> see ArrayKlass::complete_create_array_klass()
  k->restore_unshareable_info(ClassLoaderData::the_null_class_loader_data(), Handle(), CHECK);
}

// CDS runtime support: re-create the state stripped by
// remove_unshareable_info() when a shared klass is loaded.
void InstanceKlass::restore_unshareable_info(ClassLoaderData* loader_data, Handle protection_domain, TRAPS) {
  Klass::restore_unshareable_info(loader_data, protection_domain, CHECK);
  instanceKlassHandle ik(THREAD, this);

  Array<Method*>* methods = ik->methods();
  int num_methods = methods->length();
  for (int index2 = 0; index2 < num_methods; ++index2) {
    methodHandle m(THREAD, methods->at(index2));
    m->restore_unshareable_info(CHECK);
  }
  if (JvmtiExport::has_redefined_a_class()) {
    // Reinitialize vtable because RedefineClasses may have changed some
    // entries in this vtable for super classes so the CDS vtable might
    // point to old or obsolete entries.  RedefineClasses doesn't fix up
    // vtables in the shared system dictionary, only the main one.
    // It also redefines the itable too so fix that too.
    ResourceMark rm(THREAD);
    ik->vtable()->initialize_vtable(false, CHECK);
    ik->itable()->initialize_itable(false, CHECK);
  }

  // restore constant pool resolved references
  ik->constants()->restore_unshareable_info(CHECK);

  ik->array_klasses_do(restore_unshareable_in_class, CHECK);
}

// returns true IFF is_in_error_state() has been changed as a result of this call.
bool InstanceKlass::check_sharing_error_state() {
  assert(DumpSharedSpaces, "should only be called during dumping");
  bool old_state = is_in_error_state();

  if (!is_in_error_state()) {
    bool bad = false;
    // Propagate an error state from any super class ...
    for (InstanceKlass* sup = java_super(); sup; sup = sup->java_super()) {
      if (sup->is_in_error_state()) {
        bad = true;
        break;
      }
    }
    // ... or from any (transitively) implemented interface.
    if (!bad) {
      Array<Klass*>* interfaces = transitive_interfaces();
      for (int i = 0; i < interfaces->length(); i++) {
        Klass* iface = interfaces->at(i);
        if (InstanceKlass::cast(iface)->is_in_error_state()) {
          bad = true;
          break;
        }
      }
    }

    if (bad) {
      set_in_error_state();
    }
  }

  return (old_state != is_in_error_state());
}

// Helper for methods_do() in release_C_heap_structures() below.
static void clear_all_breakpoints(Method* m) {
  m->clear_all_breakpoints();
}


// Notify interested subsystems that klass ik is being unloaded.
void InstanceKlass::notify_unload_class(InstanceKlass* ik) {
  // notify the debugger
  if (JvmtiExport::should_post_class_unload()) {
    JvmtiExport::post_class_unload(ik);
  }

  // notify ClassLoadingService of class unload
  ClassLoadingService::notify_class_unloaded(ik);
}

// Static variant that also releases the constant pool's C heap state.
void InstanceKlass::release_C_heap_structures(InstanceKlass* ik) {
  // Clean up C heap
  ik->release_C_heap_structures();
  ik->constants()->release_C_heap_structures();
}

// Frees all C-heap allocations owned by this klass when it is unloaded.
void InstanceKlass::release_C_heap_structures() {

  // Can't release the constant pool here because the constant pool can be
  // deallocated separately from the InstanceKlass for default methods and
  // redefine classes.

  // Deallocate oop map cache
  if (_oop_map_cache != NULL) {
    delete _oop_map_cache;
    _oop_map_cache = NULL;
  }

  // Deallocate JNI identifiers for jfieldIDs
  JNIid::deallocate(jni_ids());
  set_jni_ids(NULL);

  // Detach the jmethodID cache before freeing it.
  jmethodID* jmeths = methods_jmethod_ids_acquire();
  if (jmeths != (jmethodID*)NULL) {
    release_set_methods_jmethod_ids(NULL);
    FreeHeap(jmeths);
  }

  // Deallocate MemberNameTable
  {
    // At a safepoint no lock is needed; otherwise take MemberNameTable_lock.
    Mutex* lock_or_null = SafepointSynchronize::is_at_safepoint() ? NULL : MemberNameTable_lock;
    MutexLockerEx ml(lock_or_null, Mutex::_no_safepoint_check_flag);
    MemberNameTable* mnt = member_names();
    if (mnt != NULL) {
      delete mnt;
      set_member_names(NULL);
    }
  }

  // release dependencies
  nmethodBucket* b = _dependencies;
  _dependencies = NULL;
  while (b != NULL) {
    nmethodBucket* next = b->next();
    delete b;
    b = next;
  }

  // Deallocate breakpoint records
  if (breakpoints() != 0x0) {
    methods_do(clear_all_breakpoints);
    assert(breakpoints() == 0x0, "should have cleared breakpoints");
  }

  // deallocate the cached class file
  if (_cached_class_file != NULL) {
    os::free(_cached_class_file);
    _cached_class_file = NULL;
  }

  // Decrement symbol reference counts associated with the unloaded class.
  if (_name != NULL) _name->decrement_refcount();
  // unreference array name derived from this class name (arrays of an unloaded
  // class can't be referenced anymore).
  if (_array_name != NULL)  _array_name->decrement_refcount();
  if (_source_debug_extension != NULL) FREE_C_HEAP_ARRAY(char, _source_debug_extension);

  assert(_total_instanceKlass_count >= 1, "Sanity check");
  Atomic::dec(&_total_instanceKlass_count);
}

// Stores a NUL-terminated C-heap copy of the SourceDebugExtension
// attribute (array/length), or NULL when the attribute is absent.
void InstanceKlass::set_source_debug_extension(char* array, int length) {
  if (array == NULL) {
    _source_debug_extension = NULL;
  } else {
    // Adding one to the attribute length in order to store a null terminator
    // character could cause an overflow because the attribute length is
    // already coded with an u4 in the classfile, but in practice, it's
    // unlikely to happen.
    assert((length+1) > length, "Overflow checking");
    char* sde = NEW_C_HEAP_ARRAY(char, (length + 1), mtClass);
    for (int i = 0; i < length; i++) {
      sde[i] = array[i];
    }
    sde[length] = '\0';
    _source_debug_extension = sde;
  }
}

// Address of the static field at 'offset' inside this klass's java mirror.
address InstanceKlass::static_field_addr(int offset) {
  return (address)(offset + InstanceMirrorKlass::offset_of_static_fields() + cast_from_oop<intptr_t>(java_mirror()));
}


// Builds the JVM type-descriptor form of this klass's name
// ("Lpkg/Name;") in a resource-allocated buffer.
const char* InstanceKlass::signature_name() const {
  int hash_len = 0;
  char hash_buf[40];

  // If this is an anonymous class, append a hash to make the name unique
  if (is_anonymous()) {
    intptr_t hash = (java_mirror() != NULL) ? java_mirror()->identity_hash() : 0;
    jio_snprintf(hash_buf, sizeof(hash_buf), "/" UINTX_FORMAT, (uintx)hash);
    hash_len = (int)strlen(hash_buf);
  }

  // Get the internal name as a c string
  const char* src = (const char*) (name()->as_C_string());
  const int src_length = (int)strlen(src);

  // +3 = leading 'L', trailing ';' and NUL
  char* dest = NEW_RESOURCE_ARRAY(char, src_length + hash_len + 3);

  // Add L as type indicator
  int dest_index = 0;
  dest[dest_index++] = 'L';

  // Add the actual class name
  for (int src_index = 0; src_index < src_length; ) {
    dest[dest_index++] = src[src_index++];
  }

  // If we have a hash, append it
  for (int hash_index = 0; hash_index < hash_len; ) {
    dest[dest_index++] = hash_buf[hash_index++];
  }

  // Add the semicolon and the NULL
  dest[dest_index++] = ';';
  dest[dest_index]   = '\0';
  return dest;
}

// different versions of is_same_class_package
bool InstanceKlass::is_same_class_package(Klass* class2) {
  Klass* class1 = this;
  oop classloader1 = InstanceKlass::cast(class1)->class_loader();
  Symbol* classname1 = class1->name();

  // For object arrays, compare against the element class.
  if (class2->oop_is_objArray()) {
    class2 = ObjArrayKlass::cast(class2)->bottom_klass();
  }
  oop classloader2;
  if (class2->oop_is_instance()) {
    classloader2 = InstanceKlass::cast(class2)->class_loader();
  } else {
    assert(class2->oop_is_typeArray(), "should be type array");
    classloader2 = NULL;
  }
  Symbol* classname2 = class2->name();

  return InstanceKlass::is_same_class_package(classloader1, classname1,
                                              classloader2, classname2);
}

bool InstanceKlass::is_same_class_package(oop classloader2, Symbol* classname2) {
  Klass* class1 = this;
  oop classloader1 = InstanceKlass::cast(class1)->class_loader();
  Symbol* classname1 = class1->name();

  return InstanceKlass::is_same_class_package(classloader1, classname1,
                                              classloader2, classname2);
}

// return true if two classes are in the same package, classloader
// and classname information is enough to determine a class's package
bool InstanceKlass::is_same_class_package(oop class_loader1, Symbol* class_name1,
                                          oop class_loader2, Symbol* class_name2) {
  if (class_loader1 != class_loader2) {
    return false;
  } else if (class_name1 == class_name2) {
    return true;                // skip painful bytewise comparison
  } else {
    ResourceMark rm;

    // The Symbol*'s are in UTF8 encoding. Since we only need to check explicitly
    // for ASCII characters ('/', 'L', '['), we can keep them in UTF8 encoding.
    // Otherwise, we just compare jbyte values between the strings.
    const jbyte *name1 = class_name1->base();
    const jbyte *name2 = class_name2->base();

    const jbyte *last_slash1 = UTF8::strrchr(name1, class_name1->utf8_length(), '/');
    const jbyte *last_slash2 = UTF8::strrchr(name2, class_name2->utf8_length(), '/');

    if ((last_slash1 == NULL) || (last_slash2 == NULL)) {
      // One of the two doesn't have a package.  Only return true
      // if the other one also doesn't have a package.
      return last_slash1 == last_slash2;
    } else {
      // Skip over '['s
      if (*name1 == '[') {
        do {
          name1++;
        } while (*name1 == '[');
        if (*name1 != 'L') {
          // Something is terribly wrong.  Shouldn't be here.
          return false;
        }
      }
      if (*name2 == '[') {
        do {
          name2++;
        } while (*name2 == '[');
        if (*name2 != 'L') {
          // Something is terribly wrong.  Shouldn't be here.
          return false;
        }
      }

      // Check that package part is identical
      int length1 = last_slash1 - name1;
      int length2 = last_slash2 - name2;

      return UTF8::equal(name1, length1, name2, length2);
    }
  }
}

// Returns true iff super_method can be overridden by a method in targetclassname
// See JSL 3rd edition 8.4.6.1
// Assumes name-signature match
// "this" is InstanceKlass of super_method which must exist
// note that the InstanceKlass of the method in the targetclassname has not always been created yet
bool InstanceKlass::is_override(methodHandle super_method, Handle targetclassloader, Symbol* targetclassname, TRAPS) {
   // Private methods can not be overridden
   if (super_method->is_private()) {
     return false;
   }
   // If super method is accessible, then override
   if ((super_method->is_protected()) ||
       (super_method->is_public())) {
     return true;
   }
   // Package-private methods are not inherited outside of package
   assert(super_method->is_package_private(), "must be package private");
   return(is_same_class_package(targetclassloader(), targetclassname));
}

/* defined for now in jvm.cpp, for historical reasons *--
Klass* InstanceKlass::compute_enclosing_class_impl(instanceKlassHandle self,
                                                   Symbol*& simple_name_result, TRAPS) {
  ...
2669 } 2670 */ 2671 2672 // tell if two classes have the same enclosing class (at package level) 2673 bool InstanceKlass::is_same_package_member_impl(instanceKlassHandle class1, 2674 Klass* class2_oop, TRAPS) { 2675 if (class2_oop == class1()) return true; 2676 if (!class2_oop->oop_is_instance()) return false; 2677 instanceKlassHandle class2(THREAD, class2_oop); 2678 2679 // must be in same package before we try anything else 2680 if (!class1->is_same_class_package(class2->class_loader(), class2->name())) 2681 return false; 2682 2683 // As long as there is an outer1.getEnclosingClass, 2684 // shift the search outward. 2685 instanceKlassHandle outer1 = class1; 2686 for (;;) { 2687 // As we walk along, look for equalities between outer1 and class2. 2688 // Eventually, the walks will terminate as outer1 stops 2689 // at the top-level class around the original class. 2690 bool ignore_inner_is_member; 2691 Klass* next = outer1->compute_enclosing_class(&ignore_inner_is_member, 2692 CHECK_false); 2693 if (next == NULL) break; 2694 if (next == class2()) return true; 2695 outer1 = instanceKlassHandle(THREAD, next); 2696 } 2697 2698 // Now do the same for class2. 2699 instanceKlassHandle outer2 = class2; 2700 for (;;) { 2701 bool ignore_inner_is_member; 2702 Klass* next = outer2->compute_enclosing_class(&ignore_inner_is_member, 2703 CHECK_false); 2704 if (next == NULL) break; 2705 // Might as well check the new outer against all available values. 2706 if (next == class1()) return true; 2707 if (next == outer1()) return true; 2708 outer2 = instanceKlassHandle(THREAD, next); 2709 } 2710 2711 // If by this point we have not found an equality between the 2712 // two classes, we know they are in separate package members. 2713 return false; 2714 } 2715 2716 2717 jint InstanceKlass::compute_modifier_flags(TRAPS) const { 2718 jint access = access_flags().as_int(); 2719 2720 // But check if it happens to be member class. 
  instanceKlassHandle ik(THREAD, this);
  InnerClassesIterator iter(ik);
  for (; !iter.done(); iter.next()) {
    int ioff = iter.inner_class_info_index();
    // Inner class attribute can be zero, skip it.
    // Strange but true:  JVM spec. allows null inner class refs.
    if (ioff == 0) continue;

    // only look at classes that are already loaded
    // since we are looking for the flags for our self.
    Symbol* inner_name = ik->constants()->klass_name_at(ioff);
    if ((ik->name() == inner_name)) {
      // This is really a member class.
      access = iter.inner_access_flags();
      break;
    }
  }
  // Remember to strip ACC_SUPER bit
  return (access & (~JVM_ACC_SUPER)) & JVM_ACC_WRITTEN_FLAGS;
}

// Report this class's state as the JVMTI_CLASS_STATUS_* bit set
// (verified/prepared once linked, plus initialized and error bits).
jint InstanceKlass::jvmti_class_status() const {
  jint result = 0;

  if (is_linked()) {
    result |= JVMTI_CLASS_STATUS_VERIFIED | JVMTI_CLASS_STATUS_PREPARED;
  }

  if (is_initialized()) {
    assert(is_linked(), "Class status is not consistent");
    result |= JVMTI_CLASS_STATUS_INITIALIZED;
  }
  if (is_in_error_state()) {
    result |= JVMTI_CLASS_STATUS_ERROR;
  }
  return result;
}

// Look up the method at 'index' in the itable section belonging to interface
// 'holder'.  Throws IncompatibleClassChangeError if this class does not
// implement 'holder', and AbstractMethodError if the slot is empty.
Method* InstanceKlass::method_at_itable(Klass* holder, int index, TRAPS) {
  itableOffsetEntry* ioe = (itableOffsetEntry*)start_of_itable();
  // The first offset entry points past the offset table, which bounds how
  // many interface entries precede the method tables.
  int method_table_offset_in_words = ioe->offset()/wordSize;
  int nof_interfaces = (method_table_offset_in_words - itable_offset_in_words())
                       / itableOffsetEntry::size();

  for (int cnt = 0 ; ; cnt ++, ioe ++) {
    // If the interface isn't implemented by the receiver class,
    // the VM should throw IncompatibleClassChangeError.
    if (cnt >= nof_interfaces) {
      THROW_NULL(vmSymbols::java_lang_IncompatibleClassChangeError());
    }

    Klass* ik = ioe->interface_klass();
    if (ik == holder) break;
  }

  itableMethodEntry* ime = ioe->first_method_entry(this);
  Method* m = ime[index].method();
  if (m == NULL) {
    THROW_NULL(vmSymbols::java_lang_AbstractMethodError());
  }
  return m;
}


#if INCLUDE_JVMTI
// update default_methods for redefineclasses for methods that are
// not yet in the vtable due to concurrent subclass define and superinterface
// redefinition
// Note: those in the vtable, should have been updated via adjust_method_entries
void InstanceKlass::adjust_default_methods(InstanceKlass* holder, bool* trace_name_printed) {
  // search the default_methods for uses of either obsolete or EMCP methods
  if (default_methods() != NULL) {
    for (int index = 0; index < default_methods()->length(); index ++) {
      Method* old_method = default_methods()->at(index);
      if (old_method == NULL || old_method->method_holder() != holder || !old_method->is_old()) {
        continue; // skip uninteresting entries
      }
      assert(!old_method->is_deleted(), "default methods may not be deleted");

      // Map the stale entry to the corresponding method in the redefined holder.
      Method* new_method = holder->method_with_idnum(old_method->orig_method_idnum());

      assert(new_method != NULL, "method_with_idnum() should not be NULL");
      assert(old_method != new_method, "sanity check");

      default_methods()->at_put(index, new_method);
      if (RC_TRACE_IN_RANGE(0x00100000, 0x00400000)) {
        if (!(*trace_name_printed)) {
          // RC_TRACE_MESG macro has an embedded ResourceMark
          RC_TRACE_MESG(("adjust: klassname=%s default methods from name=%s",
                         external_name(),
                         old_method->method_holder()->external_name()));
          *trace_name_printed = true;
        }
        RC_TRACE(0x00100000, ("default method update: %s(%s) ",
                              new_method->name()->as_C_string(),
                              new_method->signature()->as_C_string()));
      }
    }
  }
}
#endif // INCLUDE_JVMTI

// On-stack replacement stuff

// Push n onto the head of this klass's OSR nmethod list and, under tiered
// compilation, retire lower-level OSR compilations for the same bci.
void InstanceKlass::add_osr_nmethod(nmethod* n) {
  // only one compilation can be active
  {
    // This is a short non-blocking critical region, so the no safepoint check is ok.
    MutexLockerEx ml(OsrList_lock, Mutex::_no_safepoint_check_flag);
    assert(n->is_osr_method(), "wrong kind of nmethod");
    n->set_osr_link(osr_nmethods_head());
    set_osr_nmethods_head(n);
    // Raise the highest osr level if necessary
    if (TieredCompilation) {
      Method* m = n->method();
      m->set_highest_osr_comp_level(MAX2(m->highest_osr_comp_level(), n->comp_level()));
    }
  }

  // Get rid of the osr methods for the same bci that have lower levels.
  if (TieredCompilation) {
    for (int l = CompLevel_limited_profile; l < n->comp_level(); l++) {
      nmethod *inv = lookup_osr_nmethod(n->method(), n->osr_entry_bci(), l, true);
      if (inv != NULL && inv->is_in_use()) {
        inv->make_not_entrant();
      }
    }
  }
}


// Unlink n from this klass's OSR nmethod list.  Under tiered compilation the
// method's highest OSR comp level is recomputed from the surviving entries.
void InstanceKlass::remove_osr_nmethod(nmethod* n) {
  // This is a short non-blocking critical region, so the no safepoint check is ok.
  MutexLockerEx ml(OsrList_lock, Mutex::_no_safepoint_check_flag);
  assert(n->is_osr_method(), "wrong kind of nmethod");
  nmethod* last = NULL;
  nmethod* cur  = osr_nmethods_head();
  int max_level = CompLevel_none;  // Find the max comp level excluding n
  Method* m = n->method();
  // Search for match
  while(cur != NULL && cur != n) {
    if (TieredCompilation && m == cur->method()) {
      // Find max level before n
      max_level = MAX2(max_level, cur->comp_level());
    }
    last = cur;
    cur = cur->osr_link();
  }
  nmethod* next = NULL;
  if (cur == n) {
    next = cur->osr_link();
    if (last == NULL) {
      // Remove first element
      set_osr_nmethods_head(next);
    } else {
      last->set_osr_link(next);
    }
  }
  n->set_osr_link(NULL);
  if (TieredCompilation) {
    cur = next;
    while (cur != NULL) {
      // Find max level after n
      if (m == cur->method()) {
        max_level = MAX2(max_level, cur->comp_level());
      }
      cur = cur->osr_link();
    }
    m->set_highest_osr_comp_level(max_level);
  }
}

// Mark every OSR nmethod in the chain belonging to m for deoptimization;
// returns the number of nmethods marked.
int InstanceKlass::mark_osr_nmethods(const Method* m) {
  // This is a short non-blocking critical region, so the no safepoint check is ok.
  MutexLockerEx ml(OsrList_lock, Mutex::_no_safepoint_check_flag);
  nmethod* osr = osr_nmethods_head();
  int found = 0;
  while (osr != NULL) {
    assert(osr->is_osr_method(), "wrong kind of nmethod found in chain");
    if (osr->method() == m) {
      osr->mark_for_deoptimization();
      found++;
    }
    osr = osr->osr_link();
  }
  return found;
}

// Find an OSR nmethod for (m, bci).  With match_level, only an exact
// comp_level match is returned; otherwise the best available level that is
// at least comp_level is returned (NULL if none qualifies).
nmethod* InstanceKlass::lookup_osr_nmethod(const Method* m, int bci, int comp_level, bool match_level) const {
  // This is a short non-blocking critical region, so the no safepoint check is ok.
  MutexLockerEx ml(OsrList_lock, Mutex::_no_safepoint_check_flag);
  nmethod* osr = osr_nmethods_head();
  nmethod* best = NULL;
  while (osr != NULL) {
    assert(osr->is_osr_method(), "wrong kind of nmethod found in chain");
    // There can be a time when a c1 osr method exists but we are waiting
    // for a c2 version. When c2 completes its osr nmethod we will trash
    // the c1 version and only be able to find the c2 version. However
    // while we overflow in the c1 code at back branches we don't want to
    // try and switch to the same code as we are already running

    if (osr->method() == m &&
        (bci == InvocationEntryBci || osr->osr_entry_bci() == bci)) {
      if (match_level) {
        if (osr->comp_level() == comp_level) {
          // Found a match - return it.
          return osr;
        }
      } else {
        if (best == NULL || (osr->comp_level() > best->comp_level())) {
          if (osr->comp_level() == CompLevel_highest_tier) {
            // Found the best possible - return it.
            return osr;
          }
          best = osr;
        }
      }
    }
    osr = osr->osr_link();
  }
  if (best != NULL && best->comp_level() >= comp_level && match_level == false) {
    return best;
  }
  return NULL;
}

// Cache mem_name (as a weak global JNI handle) in this klass's MemberNameTable.
// Returns false without caching if the target method has been redefined.
bool InstanceKlass::add_member_name(Handle mem_name) {
  jweak mem_name_wref = JNIHandles::make_weak_global(mem_name);
  MutexLocker ml(MemberNameTable_lock);
  DEBUG_ONLY(No_Safepoint_Verifier nsv);

  // Check if method has been redefined while taking out MemberNameTable_lock, if so
  // return false.  We cannot cache obsolete methods. They will crash when the function
  // is called!
2954 Method* method = (Method*)java_lang_invoke_MemberName::vmtarget(mem_name()); 2955 if (method->is_obsolete()) { 2956 return false; 2957 } else if (method->is_old()) { 2958 // Replace method with redefined version 2959 java_lang_invoke_MemberName::set_vmtarget(mem_name(), method_with_idnum(method->method_idnum())); 2960 } 2961 2962 if (_member_names == NULL) { 2963 _member_names = new (ResourceObj::C_HEAP, mtClass) MemberNameTable(idnum_allocated_count()); 2964 } 2965 _member_names->add_member_name(mem_name_wref); 2966 return true; 2967 } 2968 2969 // ----------------------------------------------------------------------------------------------------- 2970 // Printing 2971 2972 #ifndef PRODUCT 2973 2974 #define BULLET " - " 2975 2976 static const char* state_names[] = { 2977 "allocated", "loaded", "linked", "being_initialized", "fully_initialized", "initialization_error" 2978 }; 2979 2980 static void print_vtable(intptr_t* start, int len, outputStream* st) { 2981 for (int i = 0; i < len; i++) { 2982 intptr_t e = start[i]; 2983 st->print("%d : " INTPTR_FORMAT, i, e); 2984 if (e != 0 && ((Metadata*)e)->is_metaspace_object()) { 2985 st->print(" "); 2986 ((Metadata*)e)->print_value_on(st); 2987 } 2988 st->cr(); 2989 } 2990 } 2991 2992 void InstanceKlass::print_on(outputStream* st) const { 2993 assert(is_klass(), "must be klass"); 2994 Klass::print_on(st); 2995 2996 st->print(BULLET"instance size: %d", size_helper()); st->cr(); 2997 st->print(BULLET"klass size: %d", size()); st->cr(); 2998 st->print(BULLET"access: "); access_flags().print_on(st); st->cr(); 2999 st->print(BULLET"state: "); st->print_cr("%s", state_names[_init_state]); 3000 st->print(BULLET"name: "); name()->print_value_on(st); st->cr(); 3001 st->print(BULLET"super: "); super()->print_value_on_maybe_null(st); st->cr(); 3002 st->print(BULLET"sub: "); 3003 Klass* sub = subklass(); 3004 int n; 3005 for (n = 0; sub != NULL; n++, sub = sub->next_sibling()) { 3006 if (n < MaxSubklassPrintSize) { 3007 
sub->print_value_on(st); 3008 st->print(" "); 3009 } 3010 } 3011 if (n >= MaxSubklassPrintSize) st->print("(%d more klasses...)", n - MaxSubklassPrintSize); 3012 st->cr(); 3013 3014 if (is_interface()) { 3015 st->print_cr(BULLET"nof implementors: %d", nof_implementors()); 3016 if (nof_implementors() == 1) { 3017 st->print_cr(BULLET"implementor: "); 3018 st->print(" "); 3019 implementor()->print_value_on(st); 3020 st->cr(); 3021 } 3022 } 3023 3024 st->print(BULLET"arrays: "); array_klasses()->print_value_on_maybe_null(st); st->cr(); 3025 st->print(BULLET"methods: "); methods()->print_value_on(st); st->cr(); 3026 if (Verbose || WizardMode) { 3027 Array<Method*>* method_array = methods(); 3028 for (int i = 0; i < method_array->length(); i++) { 3029 st->print("%d : ", i); method_array->at(i)->print_value(); st->cr(); 3030 } 3031 } 3032 st->print(BULLET"method ordering: "); method_ordering()->print_value_on(st); st->cr(); 3033 st->print(BULLET"default_methods: "); default_methods()->print_value_on(st); st->cr(); 3034 if (Verbose && default_methods() != NULL) { 3035 Array<Method*>* method_array = default_methods(); 3036 for (int i = 0; i < method_array->length(); i++) { 3037 st->print("%d : ", i); method_array->at(i)->print_value(); st->cr(); 3038 } 3039 } 3040 if (default_vtable_indices() != NULL) { 3041 st->print(BULLET"default vtable indices: "); default_vtable_indices()->print_value_on(st); st->cr(); 3042 } 3043 st->print(BULLET"local interfaces: "); local_interfaces()->print_value_on(st); st->cr(); 3044 st->print(BULLET"trans. 
interfaces: "); transitive_interfaces()->print_value_on(st); st->cr(); 3045 st->print(BULLET"constants: "); constants()->print_value_on(st); st->cr(); 3046 if (class_loader_data() != NULL) { 3047 st->print(BULLET"class loader data: "); 3048 class_loader_data()->print_value_on(st); 3049 st->cr(); 3050 } 3051 st->print(BULLET"host class: "); host_klass()->print_value_on_maybe_null(st); st->cr(); 3052 if (source_file_name() != NULL) { 3053 st->print(BULLET"source file: "); 3054 source_file_name()->print_value_on(st); 3055 st->cr(); 3056 } 3057 if (source_debug_extension() != NULL) { 3058 st->print(BULLET"source debug extension: "); 3059 st->print("%s", source_debug_extension()); 3060 st->cr(); 3061 } 3062 st->print(BULLET"class annotations: "); class_annotations()->print_value_on(st); st->cr(); 3063 st->print(BULLET"class type annotations: "); class_type_annotations()->print_value_on(st); st->cr(); 3064 st->print(BULLET"field annotations: "); fields_annotations()->print_value_on(st); st->cr(); 3065 st->print(BULLET"field type annotations: "); fields_type_annotations()->print_value_on(st); st->cr(); 3066 { 3067 bool have_pv = false; 3068 // previous versions are linked together through the InstanceKlass 3069 for (InstanceKlass* pv_node = _previous_versions; 3070 pv_node != NULL; 3071 pv_node = pv_node->previous_versions()) { 3072 if (!have_pv) 3073 st->print(BULLET"previous version: "); 3074 have_pv = true; 3075 pv_node->constants()->print_value_on(st); 3076 } 3077 if (have_pv) st->cr(); 3078 } 3079 3080 if (generic_signature() != NULL) { 3081 st->print(BULLET"generic signature: "); 3082 generic_signature()->print_value_on(st); 3083 st->cr(); 3084 } 3085 st->print(BULLET"inner classes: "); inner_classes()->print_value_on(st); st->cr(); 3086 st->print(BULLET"java mirror: "); java_mirror()->print_value_on(st); st->cr(); 3087 st->print(BULLET"vtable length %d (start addr: " INTPTR_FORMAT ")", vtable_length(), start_of_vtable()); st->cr(); 3088 if (vtable_length() > 0 && 
(Verbose || WizardMode)) print_vtable(start_of_vtable(), vtable_length(), st); 3089 st->print(BULLET"itable length %d (start addr: " INTPTR_FORMAT ")", itable_length(), start_of_itable()); st->cr(); 3090 if (itable_length() > 0 && (Verbose || WizardMode)) print_vtable(start_of_itable(), itable_length(), st); 3091 st->print_cr(BULLET"---- static fields (%d words):", static_field_size()); 3092 FieldPrinter print_static_field(st); 3093 ((InstanceKlass*)this)->do_local_static_fields(&print_static_field); 3094 st->print_cr(BULLET"---- non-static fields (%d words):", nonstatic_field_size()); 3095 FieldPrinter print_nonstatic_field(st); 3096 ((InstanceKlass*)this)->do_nonstatic_fields(&print_nonstatic_field); 3097 3098 st->print(BULLET"non-static oop maps: "); 3099 OopMapBlock* map = start_of_nonstatic_oop_maps(); 3100 OopMapBlock* end_map = map + nonstatic_oop_map_count(); 3101 while (map < end_map) { 3102 st->print("%d-%d ", map->offset(), map->offset() + heapOopSize*(map->count() - 1)); 3103 map++; 3104 } 3105 st->cr(); 3106 } 3107 3108 #endif //PRODUCT 3109 3110 void InstanceKlass::print_value_on(outputStream* st) const { 3111 assert(is_klass(), "must be klass"); 3112 if (Verbose || WizardMode) access_flags().print_on(st); 3113 name()->print_value_on(st); 3114 } 3115 3116 #ifndef PRODUCT 3117 3118 void FieldPrinter::do_field(fieldDescriptor* fd) { 3119 _st->print(BULLET); 3120 if (_obj == NULL) { 3121 fd->print_on(_st); 3122 _st->cr(); 3123 } else { 3124 fd->print_on_for(_st, _obj); 3125 _st->cr(); 3126 } 3127 } 3128 3129 3130 void InstanceKlass::oop_print_on(oop obj, outputStream* st) { 3131 Klass::oop_print_on(obj, st); 3132 3133 if (this == SystemDictionary::String_klass()) { 3134 typeArrayOop value = java_lang_String::value(obj); 3135 juint offset = java_lang_String::offset(obj); 3136 juint length = java_lang_String::length(obj); 3137 if (value != NULL && 3138 value->is_typeArray() && 3139 offset <= (juint) value->length() && 3140 offset + length <= (juint) 
 value->length()) {
      st->print(BULLET"string: ");
      java_lang_String::print(obj, st);
      st->cr();
      if (!WizardMode)  return;  // that is enough
    }
  }

  st->print_cr(BULLET"---- fields (total size %d words):", oop_size(obj));
  FieldPrinter print_field(st, obj);
  do_nonstatic_fields(&print_field);

  if (this == SystemDictionary::Class_klass()) {
    st->print(BULLET"signature: ");
    java_lang_Class::print_signature(obj, st);
    st->cr();
    Klass* mirrored_klass = java_lang_Class::as_Klass(obj);
    st->print(BULLET"fake entry for mirror: ");
    mirrored_klass->print_value_on_maybe_null(st);
    st->cr();
    Klass* array_klass = java_lang_Class::array_klass(obj);
    st->print(BULLET"fake entry for array: ");
    array_klass->print_value_on_maybe_null(st);
    st->cr();
    st->print_cr(BULLET"fake entry for oop_size: %d", java_lang_Class::oop_size(obj));
    st->print_cr(BULLET"fake entry for static_oop_field_count: %d", java_lang_Class::static_oop_field_count(obj));
    Klass* real_klass = java_lang_Class::as_Klass(obj);
    if (real_klass != NULL && real_klass->oop_is_instance()) {
      // Mirrors also carry the mirrored class's static fields; print them too.
      InstanceKlass::cast(real_klass)->do_local_static_fields(&print_field);
    }
  } else if (this == SystemDictionary::MethodType_klass()) {
    st->print(BULLET"signature: ");
    java_lang_invoke_MethodType::print_signature(obj, st);
    st->cr();
  }
}

#endif //PRODUCT

// Short one-line form of an instance: "a <name> <address>", plus a
// type-specific payload for String, Class, MethodType, boxes, LambdaForm
// and MemberName instances.
void InstanceKlass::oop_print_value_on(oop obj, outputStream* st) {
  st->print("a ");
  name()->print_value_on(st);
  obj->print_address_on(st);
  if (this == SystemDictionary::String_klass()
      && java_lang_String::value(obj) != NULL) {
    ResourceMark rm;
    int len = java_lang_String::length(obj);
    // Print at most 24 chars; longer strings are truncated to a 12-char prefix.
    int plen = (len < 24 ? len : 12);
    char* str = java_lang_String::as_utf8_string(obj, 0, plen);
    st->print(" = \"%s\"", str);
    if (len > plen)
      st->print("...[%d]", len);
  } else if (this == SystemDictionary::Class_klass()) {
    Klass* k = java_lang_Class::as_Klass(obj);
    st->print(" = ");
    if (k != NULL) {
      k->print_value_on(st);
    } else {
      // Primitive mirror: no Klass, print the primitive type name instead.
      const char* tname = type2name(java_lang_Class::primitive_type(obj));
      st->print("%s", tname ? tname : "type?");
    }
  } else if (this == SystemDictionary::MethodType_klass()) {
    st->print(" = ");
    java_lang_invoke_MethodType::print_signature(obj, st);
  } else if (java_lang_boxing_object::is_instance(obj)) {
    st->print(" = ");
    java_lang_boxing_object::print(obj, st);
  } else if (this == SystemDictionary::LambdaForm_klass()) {
    oop vmentry = java_lang_invoke_LambdaForm::vmentry(obj);
    if (vmentry != NULL) {
      st->print(" => ");
      vmentry->print_value_on(st);
    }
  } else if (this == SystemDictionary::MemberName_klass()) {
    Metadata* vmtarget = java_lang_invoke_MemberName::vmtarget(obj);
    if (vmtarget != NULL) {
      st->print(" = ");
      vmtarget->print_value_on(st);
    } else {
      // Unresolved MemberName: print clazz.name instead of the vmtarget.
      java_lang_invoke_MemberName::clazz(obj)->print_value_on(st);
      st->print(".");
      java_lang_invoke_MemberName::name(obj)->print_value_on(st);
    }
  }
}

const char* InstanceKlass::internal_name() const {
  return external_name();
}

#if INCLUDE_SERVICES
// Size Statistics
void InstanceKlass::collect_statistics(KlassSizeStats *sz) const {
  Klass::collect_statistics(sz);

  sz->_inst_size  = HeapWordSize * size_helper();
  sz->_vtab_bytes = HeapWordSize * align_object_offset(vtable_length());
  sz->_itab_bytes = HeapWordSize * align_object_offset(itable_length());
  sz->_nonstatic_oopmap_bytes = HeapWordSize *
        ((is_interface() || is_anonymous()) ?
         align_object_offset(nonstatic_oop_map_size()) :
         nonstatic_oop_map_size());

  int n = 0;
  n += (sz->_methods_array_bytes         = sz->count_array(methods()));
  n += (sz->_method_ordering_bytes       = sz->count_array(method_ordering()));
  n += (sz->_local_interfaces_bytes      = sz->count_array(local_interfaces()));
  n += (sz->_transitive_interfaces_bytes = sz->count_array(transitive_interfaces()));
  n += (sz->_fields_bytes                = sz->count_array(fields()));
  n += (sz->_inner_classes_bytes         = sz->count_array(inner_classes()));
  sz->_ro_bytes += n;

  const ConstantPool* cp = constants();
  if (cp) {
    cp->collect_statistics(sz);
  }

  const Annotations* anno = annotations();
  if (anno) {
    anno->collect_statistics(sz);
  }

  const Array<Method*>* methods_array = methods();
  if (methods()) {
    for (int i = 0; i < methods_array->length(); i++) {
      Method* method = methods_array->at(i);
      if (method) {
        sz->_method_count ++;
        method->collect_statistics(sz);
      }
    }
  }
}
#endif // INCLUDE_SERVICES

// Verification

// Closure that checks every visited (narrow) oop field holds a valid oop or
// null; aborts the VM with a guarantee failure otherwise.
class VerifyFieldClosure: public OopClosure {
 protected:
  template <class T> void do_oop_work(T* p) {
    oop obj = oopDesc::load_decode_heap_oop(p);
    if (!obj->is_oop_or_null()) {
      tty->print_cr("Failed: " PTR_FORMAT " -> " PTR_FORMAT, p, (address)obj);
      Universe::print();
      guarantee(false, "boom");
    }
  }
 public:
  virtual void do_oop(oop* p)       { VerifyFieldClosure::do_oop_work(p); }
  virtual void do_oop(narrowOop* p) { VerifyFieldClosure::do_oop_work(p); }
};

// Structural self-check of this klass: hierarchy links, interfaces, method
// arrays and ordering, JNI ids, and miscellaneous fields.
void InstanceKlass::verify_on(outputStream* st) {
#ifndef PRODUCT
  // Avoid redundant verifies, this really should be in product.
  if (_verify_count == Universe::verify_count()) return;
  _verify_count = Universe::verify_count();
#endif

  // Verify Klass
  Klass::verify_on(st);

  // Verify that klass is present in ClassLoaderData
  guarantee(class_loader_data()->contains_klass(this),
            "this class isn't found in class loader data");

  // Verify vtables
  if (is_linked()) {
    ResourceMark rm;
    // $$$ This used to be done only for m/s collections.  Doing it
    // always seemed a valid generalization.  (DLD -- 6/00)
    vtable()->verify(st);
  }

  // Verify first subklass
  if (subklass() != NULL) {
    guarantee(subklass()->is_klass(), "should be klass");
  }

  // Verify siblings
  Klass* super = this->super();
  Klass* sib = next_sibling();
  if (sib != NULL) {
    if (sib == this) {
      fatal(err_msg("subclass points to itself " PTR_FORMAT, sib));
    }

    guarantee(sib->is_klass(), "should be klass");
    guarantee(sib->super() == super, "siblings should have same superklass");
  }

  // Verify implementor fields
  Klass* im = implementor();
  if (im != NULL) {
    guarantee(is_interface(), "only interfaces should have implementor set");
    guarantee(im->is_klass(), "should be klass");
    guarantee(!im->is_interface() || im == this,
              "implementors cannot be interfaces");
  }

  // Verify local interfaces
  if (local_interfaces()) {
    Array<Klass*>* local_interfaces = this->local_interfaces();
    for (int j = 0; j < local_interfaces->length(); j++) {
      Klass* e = local_interfaces->at(j);
      guarantee(e->is_klass() && e->is_interface(), "invalid local interface");
    }
  }

  // Verify transitive interfaces
  if (transitive_interfaces() != NULL) {
    Array<Klass*>* transitive_interfaces = this->transitive_interfaces();
    for (int j = 0; j < transitive_interfaces->length(); j++) {
      Klass* e = transitive_interfaces->at(j);
      guarantee(e->is_klass() && e->is_interface(), "invalid transitive interface");
    }
  }

  // Verify methods
  if (methods() != NULL) {
    Array<Method*>* methods = this->methods();
    for (int j = 0; j < methods->length(); j++) {
      guarantee(methods->at(j)->is_method(), "non-method in methods array");
    }
    // Methods must be sorted by name for binary-search lookup.
    for (int j = 0; j < methods->length() - 1; j++) {
      Method* m1 = methods->at(j);
      Method* m2 = methods->at(j + 1);
      guarantee(m1->name()->fast_compare(m2->name()) <= 0, "methods not sorted correctly");
    }
  }

  // Verify method ordering
  if (method_ordering() != NULL) {
    Array<int>* method_ordering = this->method_ordering();
    int length = method_ordering->length();
    if (JvmtiExport::can_maintain_original_method_order() ||
        ((UseSharedSpaces || DumpSharedSpaces) && length != 0)) {
      guarantee(length == methods()->length(), "invalid method ordering length");
      jlong sum = 0;
      for (int j = 0; j < length; j++) {
        int original_index = method_ordering->at(j);
        guarantee(original_index >= 0, "invalid method ordering index");
        guarantee(original_index < length, "invalid method ordering index");
        sum += original_index;
      }
      // Verify sum of indices 0,1,...,length-1
      guarantee(sum == ((jlong)length*(length-1))/2, "invalid method ordering sum");
    } else {
      guarantee(length == 0, "invalid method ordering length");
    }
  }

  // Verify default methods
  if (default_methods() != NULL) {
    Array<Method*>* methods = this->default_methods();
    for (int j = 0; j < methods->length(); j++) {
      guarantee(methods->at(j)->is_method(), "non-method in methods array");
    }
    for (int j = 0; j < methods->length() - 1; j++) {
      Method* m1 = methods->at(j);
      Method* m2 = methods->at(j + 1);
      guarantee(m1->name()->fast_compare(m2->name()) <= 0, "methods not sorted correctly");
    }
  }

  // Verify JNI static field identifiers
  if (jni_ids() != NULL) {
    jni_ids()->verify(this);
  }

  // Verify other fields
  if (array_klasses() != NULL) {
    guarantee(array_klasses()->is_klass(), "should be klass");
  }
  if (constants() != NULL) {
    guarantee(constants()->is_constantPool(), "should be constant pool");
  }
  const Klass* host = host_klass();
  if (host != NULL) {
    guarantee(host->is_klass(), "should be klass");
  }
}

// Verify an instance: base Klass checks plus a scan of every oop field.
void InstanceKlass::oop_verify_on(oop obj, outputStream* st) {
  Klass::oop_verify_on(obj, st);
  VerifyFieldClosure blk;
  obj->oop_iterate_no_header(&blk);
}


// JNIid class for jfieldIDs only
// Note to reviewers:
// These JNI functions are just moved over to column 1 and not changed
// in the compressed oops workspace.
JNIid::JNIid(Klass* holder, int offset, JNIid* next) {
  _holder = holder;
  _offset = offset;
  _next = next;
  debug_only(_is_static_field_id = false;)
}


// Linear search of this JNIid chain for an id with the given field offset.
JNIid* JNIid::find(int offset) {
  JNIid* current = this;
  while (current != NULL) {
    if (current->offset() == offset) return current;
    current = current->next();
  }
  return NULL;
}

// Delete every JNIid in the chain starting at 'current'.
void JNIid::deallocate(JNIid* current) {
  while (current != NULL) {
    JNIid* next = current->next();
    delete current;
    current = next;
  }
}


// Check every id in this chain belongs to 'holder' and (in debug builds)
// that static field ids fall inside the holder's static-field block.
void JNIid::verify(Klass* holder) {
  int first_field_offset  = InstanceMirrorKlass::offset_of_static_fields();
  int end_field_offset;
  end_field_offset = first_field_offset + (InstanceKlass::cast(holder)->static_field_size() * wordSize);

  JNIid* current = this;
  while (current != NULL) {
    guarantee(current->holder() == holder, "Invalid klass in JNIid");
#ifdef ASSERT
    int o = current->offset();
    if (current->is_static_field_id()) {
      guarantee(o >= first_field_offset && o < end_field_offset, "Invalid static field
offset in JNIid"); 3472 } 3473 #endif 3474 current = current->next(); 3475 } 3476 } 3477 3478 3479 #ifdef ASSERT 3480 void InstanceKlass::set_init_state(ClassState state) { 3481 bool good_state = is_shared() ? (_init_state <= state) 3482 : (_init_state < state); 3483 assert(good_state || state == allocated, "illegal state transition"); 3484 _init_state = (u1)state; 3485 } 3486 #endif 3487 3488 3489 3490 // RedefineClasses() support for previous versions: 3491 int InstanceKlass::_previous_version_count = 0; 3492 3493 // Purge previous versions before adding new previous versions of the class. 3494 void InstanceKlass::purge_previous_versions(InstanceKlass* ik) { 3495 if (ik->previous_versions() != NULL) { 3496 // This klass has previous versions so see what we can cleanup 3497 // while it is safe to do so. 3498 3499 int deleted_count = 0; // leave debugging breadcrumbs 3500 int live_count = 0; 3501 ClassLoaderData* loader_data = ik->class_loader_data(); 3502 assert(loader_data != NULL, "should never be null"); 3503 3504 // RC_TRACE macro has an embedded ResourceMark 3505 RC_TRACE(0x00000200, ("purge: %s: previous versions", ik->external_name())); 3506 3507 // previous versions are linked together through the InstanceKlass 3508 InstanceKlass* pv_node = ik->previous_versions(); 3509 InstanceKlass* last = ik; 3510 int version = 0; 3511 3512 // check the previous versions list 3513 for (; pv_node != NULL; ) { 3514 3515 ConstantPool* pvcp = pv_node->constants(); 3516 assert(pvcp != NULL, "cp ref was unexpectedly cleared"); 3517 3518 if (!pvcp->on_stack()) { 3519 // If the constant pool isn't on stack, none of the methods 3520 // are executing. Unlink this previous_version. 3521 // The previous version InstanceKlass is on the ClassLoaderData deallocate list 3522 // so will be deallocated during the next phase of class unloading. 3523 RC_TRACE(0x00000200, ("purge: previous version " INTPTR_FORMAT " is dead", 3524 pv_node)); 3525 // For debugging purposes. 
        // The constant pool is not on any thread's stack, so no method of
        // this previous version can be executing.  Mark the node as a
        // scratch class and queue it on its ClassLoaderData's deallocate
        // list, then splice it out of the previous-version chain.
        pv_node->set_is_scratch_class();
        pv_node->class_loader_data()->add_to_deallocate_list(pv_node);
        pv_node = pv_node->previous_versions();
        last->link_previous_versions(pv_node);
        deleted_count++;
        version++;
        continue;
      } else {
        RC_TRACE(0x00000200, ("purge: previous version " INTPTR_FORMAT " is alive",
          pv_node));
        assert(pvcp->pool_holder() != NULL, "Constant pool with no holder");
        guarantee (!loader_data->is_unloading(), "unloaded classes can't be on the stack");
        live_count++;
      }

      // At least one method is live in this previous version.
      // Reset dead EMCP methods not to get breakpoints.
      // All methods are deallocated when all of the methods for this class are no
      // longer running.
      Array<Method*>* method_refs = pv_node->methods();
      if (method_refs != NULL) {
        RC_TRACE(0x00000200, ("purge: previous methods length=%d",
          method_refs->length()));
        for (int j = 0; j < method_refs->length(); j++) {
          Method* method = method_refs->at(j);

          if (!method->on_stack()) {
            // no breakpoints for non-running methods
            if (method->is_running_emcp()) {
              method->set_running_emcp(false);
            }
          } else {
            assert (method->is_obsolete() || method->is_running_emcp(),
                    "emcp method cannot run after emcp bit is cleared");
            // RC_TRACE macro has an embedded ResourceMark
            RC_TRACE(0x00000200,
              ("purge: %s(%s): prev method @%d in version @%d is alive",
              method->name()->as_C_string(),
              method->signature()->as_C_string(), j, version));
          }
        }
      }
      // next previous version
      last = pv_node;
      pv_node = pv_node->previous_versions();
      version++;
    }
    RC_TRACE(0x00000200,
      ("purge: previous version stats: live=%d, deleted=%d", live_count,
      deleted_count));
  }
}

// Walk the previous-version chain and mark as obsolete every not-yet-obsolete
// method that matches, by name and signature, a method made obsolete by the
// current RedefineClasses() operation.  old_methods is the scratch class's
// method array; emcp_method_count is the number of its EMCP (equivalent
// modulo constant pool) methods, the remainder being obsolete.  Does nothing
// unless there is a mix of obsolete and EMCP methods and at least one
// previous version exists.
void InstanceKlass::mark_newly_obsolete_methods(Array<Method*>* old_methods,
                                                int emcp_method_count) {
  int obsolete_method_count = old_methods->length() - emcp_method_count;

  if (emcp_method_count != 0 && obsolete_method_count != 0 &&
      _previous_versions != NULL) {
    // We have a mix of obsolete and EMCP methods so we have to
    // clear out any matching EMCP method entries the hard way.
    int local_count = 0;
    for (int i = 0; i < old_methods->length(); i++) {
      Method* old_method = old_methods->at(i);
      if (old_method->is_obsolete()) {
        // only obsolete methods are interesting
        Symbol* m_name = old_method->name();
        Symbol* m_signature = old_method->signature();

        // previous versions are linked together through the InstanceKlass
        int j = 0;
        for (InstanceKlass* prev_version = _previous_versions;
             prev_version != NULL;
             prev_version = prev_version->previous_versions(), j++) {

          Array<Method*>* method_refs = prev_version->methods();
          for (int k = 0; k < method_refs->length(); k++) {
            Method* method = method_refs->at(k);

            if (!method->is_obsolete() &&
                method->name() == m_name &&
                method->signature() == m_signature) {
              // The current RedefineClasses() call has made all EMCP
              // versions of this method obsolete so mark it as obsolete
              RC_TRACE(0x00000400,
                ("add: %s(%s): flush obsolete method @%d in version @%d",
                m_name->as_C_string(), m_signature->as_C_string(), k, j));

              method->set_is_obsolete();
              // found the match in this generation; continue with the
              // next previous version
              break;
            }
          }

          // The previous loop may not find a matching EMCP method, but
          // that doesn't mean that we can optimize and not go any
          // further back in the PreviousVersion generations. The EMCP
          // method for this generation could have already been made obsolete,
          // but there still may be an older EMCP method that has not
          // been made obsolete.
        }

        if (++local_count >= obsolete_method_count) {
          // no more obsolete methods so bail out now
          break;
        }
      }
    }
  }
}

// Save the scratch_class as the previous version if any of the methods are running.
// The previous_versions are used to set breakpoints in EMCP methods and they are
// also used to clean MethodData links to redefined methods that are no longer running.
void InstanceKlass::add_previous_version(instanceKlassHandle scratch_class,
                                         int emcp_method_count) {
  assert(Thread::current()->is_VM_thread(),
         "only VMThread can add previous versions");

  // RC_TRACE macro has an embedded ResourceMark
  RC_TRACE(0x00000400, ("adding previous version ref for %s, EMCP_cnt=%d",
    scratch_class->external_name(), emcp_method_count));

  // Clean out old previous versions
  purge_previous_versions(this);

  // Mark newly obsolete methods in remaining previous versions.  An EMCP method from
  // a previous redefinition may be made obsolete by this redefinition.
  Array<Method*>* old_methods = scratch_class->methods();
  mark_newly_obsolete_methods(old_methods, emcp_method_count);

  // If the constant pool for this previous version of the class
  // is not marked as being on the stack, then none of the methods
  // in this previous version of the class are on the stack so
  // we don't need to add this as a previous version.
  ConstantPool* cp_ref = scratch_class->constants();
  if (!cp_ref->on_stack()) {
    RC_TRACE(0x00000400, ("add: scratch class not added; no methods are running"));
    // For debugging purposes.
    scratch_class->set_is_scratch_class();
    scratch_class->class_loader_data()->add_to_deallocate_list(scratch_class());
    // Update count for class unloading.
    // NOTE(review): the count is decremented on this not-added path while
    // the added path increments it -- confirm against the consumers of
    // _previous_version_count that this asymmetry is intended.
    _previous_version_count--;
    return;
  }

  if (emcp_method_count != 0) {
    // At least one method is still running, check for EMCP methods
    for (int i = 0; i < old_methods->length(); i++) {
      Method* old_method = old_methods->at(i);
      if (!old_method->is_obsolete() && old_method->on_stack()) {
        // if EMCP method (not obsolete) is on the stack, mark as EMCP so that
        // we can add breakpoints for it.

        // We set the method->on_stack bit during safepoints for class redefinition
        // and use this bit to set the is_running_emcp bit.
        // After the safepoint, the on_stack bit is cleared and the running emcp
        // method may exit.   If so, we would set a breakpoint in a method that
        // is never reached, but this won't be noticeable to the programmer.
        old_method->set_running_emcp(true);
        RC_TRACE(0x00000400, ("add: EMCP method %s is on_stack " INTPTR_FORMAT,
                              old_method->name_and_sig_as_C_string(), old_method));
      } else if (!old_method->is_obsolete()) {
        RC_TRACE(0x00000400, ("add: EMCP method %s is NOT on_stack " INTPTR_FORMAT,
                              old_method->name_and_sig_as_C_string(), old_method));
      }
    }
  }

  // Add previous version if any methods are still running.
  // Link the scratch class at the head of this klass's previous-version
  // chain (it must not already have a chain of its own).
  RC_TRACE(0x00000400, ("add: scratch class added; one of its methods is on_stack"));
  assert(scratch_class->previous_versions() == NULL, "shouldn't have a previous version");
  scratch_class->link_previous_versions(previous_versions());
  link_previous_versions(scratch_class());
  // Update count for class unloading.
  _previous_version_count++;
} // end add_previous_version()


// Look up a Method* by its method_idnum.
// Fast path: try the slot at index == idnum and verify it via
// method_idnum(); if that slot is out of range or holds a method with a
// different idnum, fall back to a linear scan of the methods array.
// Returns NULL if no method with the requested idnum exists.
Method* InstanceKlass::method_with_idnum(int idnum) {
  Method* m = NULL;
  if (idnum < methods()->length()) {
    m = methods()->at(idnum);
  }
  if (m == NULL || m->method_idnum() != idnum) {
    for (int index = 0; index < methods()->length(); ++index) {
      m = methods()->at(index);
      if (m->method_idnum() == idnum) {
        return m;
      }
    }
    // None found, return null for the caller to handle.
    return NULL;
  }
  return m;
}

// Length of this class's cached class file data; decoding of the
// _cached_class_file structure is delegated to VM_RedefineClasses.
jint InstanceKlass::get_cached_class_file_len() {
  return VM_RedefineClasses::get_cached_class_file_len(_cached_class_file);
}

// Pointer to this class's cached class file bytes; decoding of the
// _cached_class_file structure is delegated to VM_RedefineClasses.
unsigned char * InstanceKlass::get_cached_class_file_bytes() {
  return VM_RedefineClasses::get_cached_class_file_bytes(_cached_class_file);
}