1 /* 2 * Copyright (c) 1997, 2018, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 22 * 23 */ 24 25 #include "precompiled.hpp" 26 #include "jvm.h" 27 #include "aot/aotLoader.hpp" 28 #include "classfile/classFileParser.hpp" 29 #include "classfile/classFileStream.hpp" 30 #include "classfile/classLoader.hpp" 31 #include "classfile/javaClasses.hpp" 32 #include "classfile/moduleEntry.hpp" 33 #include "classfile/systemDictionary.hpp" 34 #include "classfile/systemDictionaryShared.hpp" 35 #include "classfile/verifier.hpp" 36 #include "classfile/vmSymbols.hpp" 37 #include "code/dependencyContext.hpp" 38 #include "compiler/compileBroker.hpp" 39 #include "gc/shared/collectedHeap.inline.hpp" 40 #include "gc/shared/specialized_oop_closures.hpp" 41 #include "interpreter/oopMapCache.hpp" 42 #include "interpreter/rewriter.hpp" 43 #include "jvmtifiles/jvmti.h" 44 #include "logging/log.hpp" 45 #include "logging/logMessage.hpp" 46 #include "logging/logStream.hpp" 47 #include "memory/heapInspection.hpp" 48 #include "memory/iterator.inline.hpp" 49 #include "memory/metadataFactory.hpp" 50 #include "memory/metaspaceClosure.hpp" 51 #include "memory/metaspaceShared.hpp" 52 #include "memory/oopFactory.hpp" 53 #include "memory/resourceArea.hpp" 54 #include "oops/fieldStreams.hpp" 55 #include "oops/instanceClassLoaderKlass.hpp" 56 #include "oops/instanceKlass.inline.hpp" 57 #include "oops/instanceMirrorKlass.hpp" 58 #include "oops/instanceOop.hpp" 59 #include "oops/klass.inline.hpp" 60 #include "oops/method.hpp" 61 #include "oops/oop.inline.hpp" 62 #include "oops/symbol.hpp" 63 #include "oops/valueKlass.hpp" 64 #include "prims/jvmtiExport.hpp" 65 #include "prims/jvmtiRedefineClasses.hpp" 66 #include "prims/jvmtiThreadState.hpp" 67 #include "prims/methodComparator.hpp" 68 #include "runtime/atomic.hpp" 69 #include "runtime/fieldDescriptor.hpp" 70 #include "runtime/handles.inline.hpp" 71 #include "runtime/javaCalls.hpp" 72 #include "runtime/mutexLocker.hpp" 73 #include "runtime/orderAccess.inline.hpp" 74 #include "runtime/thread.inline.hpp" 75 #include "services/classLoadingService.hpp" 76 #include "services/threadService.hpp" 77 #include "utilities/dtrace.hpp" 78 #include "utilities/macros.hpp" 79 #include "utilities/stringUtils.hpp" 80 #ifdef COMPILER1 81 #include "c1/c1_Compiler.hpp" 82 #endif 83 84 #ifdef DTRACE_ENABLED 85 86 87 #define HOTSPOT_CLASS_INITIALIZATION_required HOTSPOT_CLASS_INITIALIZATION_REQUIRED 88 #define HOTSPOT_CLASS_INITIALIZATION_recursive HOTSPOT_CLASS_INITIALIZATION_RECURSIVE 89 #define 
HOTSPOT_CLASS_INITIALIZATION_concurrent HOTSPOT_CLASS_INITIALIZATION_CONCURRENT 90 #define HOTSPOT_CLASS_INITIALIZATION_erroneous HOTSPOT_CLASS_INITIALIZATION_ERRONEOUS 91 #define HOTSPOT_CLASS_INITIALIZATION_super__failed HOTSPOT_CLASS_INITIALIZATION_SUPER_FAILED 92 #define HOTSPOT_CLASS_INITIALIZATION_clinit HOTSPOT_CLASS_INITIALIZATION_CLINIT 93 #define HOTSPOT_CLASS_INITIALIZATION_error HOTSPOT_CLASS_INITIALIZATION_ERROR 94 #define HOTSPOT_CLASS_INITIALIZATION_end HOTSPOT_CLASS_INITIALIZATION_END 95 #define DTRACE_CLASSINIT_PROBE(type, thread_type) \ 96 { \ 97 char* data = NULL; \ 98 int len = 0; \ 99 Symbol* clss_name = name(); \ 100 if (clss_name != NULL) { \ 101 data = (char*)clss_name->bytes(); \ 102 len = clss_name->utf8_length(); \ 103 } \ 104 HOTSPOT_CLASS_INITIALIZATION_##type( \ 105 data, len, (void*)class_loader(), thread_type); \ 106 } 107 108 #define DTRACE_CLASSINIT_PROBE_WAIT(type, thread_type, wait) \ 109 { \ 110 char* data = NULL; \ 111 int len = 0; \ 112 Symbol* clss_name = name(); \ 113 if (clss_name != NULL) { \ 114 data = (char*)clss_name->bytes(); \ 115 len = clss_name->utf8_length(); \ 116 } \ 117 HOTSPOT_CLASS_INITIALIZATION_##type( \ 118 data, len, (void*)class_loader(), thread_type, wait); \ 119 } 120 121 #else // ndef DTRACE_ENABLED 122 123 #define DTRACE_CLASSINIT_PROBE(type, thread_type) 124 #define DTRACE_CLASSINIT_PROBE_WAIT(type, thread_type, wait) 125 126 #endif // ndef DTRACE_ENABLED 127 128 static inline bool is_class_loader(const Symbol* class_name, 129 const ClassFileParser& parser) { 130 assert(class_name != NULL, "invariant"); 131 132 if (class_name == vmSymbols::java_lang_ClassLoader()) { 133 return true; 134 } 135 136 if (SystemDictionary::ClassLoader_klass_loaded()) { 137 const Klass* const super_klass = parser.super_klass(); 138 if (super_klass != NULL) { 139 if (super_klass->is_subtype_of(SystemDictionary::ClassLoader_klass())) { 140 return true; 141 } 142 } 143 } 144 return false; 145 } 146 147 InstanceKlass* InstanceKlass::allocate_instance_klass(const ClassFileParser& parser, TRAPS) { 148 const int size = InstanceKlass::size(parser.vtable_size(), 149 parser.itable_size(), 150 nonstatic_oop_map_size(parser.total_oop_map_count()), 151 parser.is_interface(), 152 parser.is_anonymous(), 153 should_store_fingerprint(parser.is_anonymous()), 154 parser.has_flattenable_fields() ? parser.java_fields_count() : 0, 155 parser.is_value_type()); 156 157 const Symbol* const class_name = parser.class_name(); 158 assert(class_name != NULL, "invariant"); 159 ClassLoaderData* loader_data = parser.loader_data(); 160 assert(loader_data != NULL, "invariant"); 161 162 InstanceKlass* ik; 163 164 // Allocation 165 if (REF_NONE == parser.reference_type()) { 166 if (class_name == vmSymbols::java_lang_Class()) { 167 // mirror 168 ik = new (loader_data, size, THREAD) InstanceMirrorKlass(parser); 169 } else if (is_class_loader(class_name, parser)) { 170 // class loader 171 ik = new (loader_data, size, THREAD) InstanceClassLoaderKlass(parser); 172 } else if (parser.is_value_type()) { 173 // value type 174 ik = new (loader_data, size, THREAD) ValueKlass(parser); 175 } else { 176 // normal 177 ik = new (loader_data, size, THREAD) InstanceKlass(parser, InstanceKlass::_misc_kind_other); 178 } 179 } else { 180 // reference 181 ik = new (loader_data, size, THREAD) InstanceRefKlass(parser); 182 } 183 184 // Check for pending exception before adding to the loader data and incrementing 185 // class count. Can get OOM here. 
186 if (HAS_PENDING_EXCEPTION) { 187 return NULL; 188 } 189 190 assert(ik != NULL, "invariant"); 191 192 const bool publicize = !parser.is_internal(); 193 #ifdef ASSERT 194 assert(ik->size() == size, ""); 195 ik->bounds_check((address) ik->start_of_vtable(), false, size); 196 ik->bounds_check((address) ik->start_of_itable(), false, size); 197 ik->bounds_check((address) ik->end_of_itable(), true, size); 198 ik->bounds_check((address) ik->end_of_nonstatic_oop_maps(), true, size); 199 #endif //ASSERT 200 201 // Add all classes to our internal class loader list here, 202 // including classes in the bootstrap (NULL) class loader. 203 loader_data->add_class(ik, publicize); 204 return ik; 205 } 206 207 #ifndef PRODUCT 208 bool InstanceKlass::bounds_check(address addr, bool edge_ok, intptr_t size_in_bytes) const { 209 const char* bad = NULL; 210 address end = NULL; 211 if (addr < (address)this) { 212 bad = "before"; 213 } else if (addr == (address)this) { 214 if (edge_ok) return true; 215 bad = "just before"; 216 } else if (addr == (end = (address)this + sizeof(intptr_t) * (size_in_bytes < 0 ? size() : size_in_bytes))) { 217 if (edge_ok) return true; 218 bad = "just after"; 219 } else if (addr > end) { 220 bad = "after"; 221 } else { 222 return true; 223 } 224 tty->print_cr("%s object bounds: " INTPTR_FORMAT " [" INTPTR_FORMAT ".." INTPTR_FORMAT "]", 225 bad, (intptr_t)addr, (intptr_t)this, (intptr_t)end); 226 Verbose = WizardMode = true; this->print(); //@@ 227 return false; 228 } 229 #endif //PRODUCT 230 231 // copy method ordering from resource area to Metaspace 232 void InstanceKlass::copy_method_ordering(const intArray* m, TRAPS) { 233 if (m != NULL) { 234 // allocate a new array and copy contents (memcpy?) 235 _method_ordering = MetadataFactory::new_array<int>(class_loader_data(), m->length(), CHECK); 236 for (int i = 0; i < m->length(); i++) { 237 _method_ordering->at_put(i, m->at(i)); 238 } 239 } else { 240 _method_ordering = Universe::the_empty_int_array(); 241 } 242 } 243 244 // create a new array of vtable_indices for default methods 245 Array<int>* InstanceKlass::create_new_default_vtable_indices(int len, TRAPS) { 246 Array<int>* vtable_indices = MetadataFactory::new_array<int>(class_loader_data(), len, CHECK_NULL); 247 assert(default_vtable_indices() == NULL, "only create once"); 248 set_default_vtable_indices(vtable_indices); 249 return vtable_indices; 250 } 251 252 InstanceKlass::InstanceKlass(const ClassFileParser& parser, unsigned kind) : 253 _static_field_size(parser.static_field_size()), 254 _nonstatic_oop_map_size(nonstatic_oop_map_size(parser.total_oop_map_count())), 255 _itable_len(parser.itable_size()), 256 _reference_type(parser.reference_type()), 257 _extra_flags(0), 258 _adr_valueklass_fixed_block(NULL) { 259 set_vtable_length(parser.vtable_size()); 260 set_kind(kind); 261 set_access_flags(parser.access_flags()); 262 set_is_anonymous(parser.is_anonymous()); 263 set_layout_helper(Klass::instance_layout_helper(parser.layout_size(), 264 false)); 265 if (parser.has_flattenable_fields()) { 266 set_has_value_fields(); 267 } 268 _java_fields_count = parser.java_fields_count(); 269 270 assert(NULL == _methods, "underlying memory not zeroed?"); 271 assert(is_instance_klass(), "is layout incorrect?"); 272 assert(size_helper() == parser.layout_size(), "incorrect size_helper?"); 273 } 274 275 void InstanceKlass::deallocate_methods(ClassLoaderData* loader_data, 276 Array<Method*>* methods) { 277 if (methods != NULL && methods != Universe::the_empty_method_array() && 278 
!methods->is_shared()) { 279 for (int i = 0; i < methods->length(); i++) { 280 Method* method = methods->at(i); 281 if (method == NULL) continue; // maybe null if error processing 282 // Only want to delete methods that are not executing for RedefineClasses. 283 // The previous version will point to them so they're not totally dangling 284 assert (!method->on_stack(), "shouldn't be called with methods on stack"); 285 MetadataFactory::free_metadata(loader_data, method); 286 } 287 MetadataFactory::free_array<Method*>(loader_data, methods); 288 } 289 } 290 291 void InstanceKlass::deallocate_interfaces(ClassLoaderData* loader_data, 292 const Klass* super_klass, 293 Array<Klass*>* local_interfaces, 294 Array<Klass*>* transitive_interfaces) { 295 // Only deallocate transitive interfaces if not empty, same as super class 296 // or same as local interfaces. See code in parseClassFile. 297 Array<Klass*>* ti = transitive_interfaces; 298 if (ti != Universe::the_empty_klass_array() && ti != local_interfaces) { 299 // check that the interfaces don't come from super class 300 Array<Klass*>* sti = (super_klass == NULL) ? NULL : 301 InstanceKlass::cast(super_klass)->transitive_interfaces(); 302 if (ti != sti && ti != NULL && !ti->is_shared()) { 303 MetadataFactory::free_array<Klass*>(loader_data, ti); 304 } 305 } 306 307 // local interfaces can be empty 308 if (local_interfaces != Universe::the_empty_klass_array() && 309 local_interfaces != NULL && !local_interfaces->is_shared()) { 310 MetadataFactory::free_array<Klass*>(loader_data, local_interfaces); 311 } 312 } 313 314 // This function deallocates the metadata and C heap pointers that the 315 // InstanceKlass points to. 316 void InstanceKlass::deallocate_contents(ClassLoaderData* loader_data) { 317 318 // Orphan the mirror first, CMS thinks it's still live. 319 if (java_mirror() != NULL) { 320 java_lang_Class::set_klass(java_mirror(), NULL); 321 } 322 323 // Also remove mirror from handles 324 loader_data->remove_handle(_java_mirror); 325 326 // Need to take this class off the class loader data list. 327 loader_data->remove_class(this); 328 329 // The array_klass for this class is created later, after error handling. 330 // For class redefinition, we keep the original class so this scratch class 331 // doesn't have an array class. Either way, assert that there is nothing 332 // to deallocate. 333 assert(array_klasses() == NULL, "array classes shouldn't be created for this class yet"); 334 335 // Release C heap allocated data that this might point to, which includes 336 // reference counting symbol names. 337 release_C_heap_structures(); 338 339 deallocate_methods(loader_data, methods()); 340 set_methods(NULL); 341 342 if (method_ordering() != NULL && 343 method_ordering() != Universe::the_empty_int_array() && 344 !method_ordering()->is_shared()) { 345 MetadataFactory::free_array<int>(loader_data, method_ordering()); 346 } 347 set_method_ordering(NULL); 348 349 // default methods can be empty 350 if (default_methods() != NULL && 351 default_methods() != Universe::the_empty_method_array() && 352 !default_methods()->is_shared()) { 353 MetadataFactory::free_array<Method*>(loader_data, default_methods()); 354 } 355 // Do NOT deallocate the default methods, they are owned by superinterfaces. 
356 set_default_methods(NULL); 357 358 // default methods vtable indices can be empty 359 if (default_vtable_indices() != NULL && 360 !default_vtable_indices()->is_shared()) { 361 MetadataFactory::free_array<int>(loader_data, default_vtable_indices()); 362 } 363 set_default_vtable_indices(NULL); 364 365 366 // This array is in Klass, but remove it with the InstanceKlass since 367 // this place would be the only caller and it can share memory with transitive 368 // interfaces. 369 if (secondary_supers() != NULL && 370 secondary_supers() != Universe::the_empty_klass_array() && 371 secondary_supers() != transitive_interfaces() && 372 !secondary_supers()->is_shared()) { 373 MetadataFactory::free_array<Klass*>(loader_data, secondary_supers()); 374 } 375 set_secondary_supers(NULL); 376 377 deallocate_interfaces(loader_data, super(), local_interfaces(), transitive_interfaces()); 378 set_transitive_interfaces(NULL); 379 set_local_interfaces(NULL); 380 381 if (fields() != NULL && !fields()->is_shared()) { 382 MetadataFactory::free_array<jushort>(loader_data, fields()); 383 } 384 set_fields(NULL, 0); 385 386 // If a method from a redefined class is using this constant pool, don't 387 // delete it, yet. The new class's previous version will point to this. 388 if (constants() != NULL) { 389 assert (!constants()->on_stack(), "shouldn't be called if anything is onstack"); 390 if (!constants()->is_shared()) { 391 MetadataFactory::free_metadata(loader_data, constants()); 392 } 393 // Delete any cached resolution errors for the constant pool 394 SystemDictionary::delete_resolution_error(constants()); 395 396 set_constants(NULL); 397 } 398 399 if (inner_classes() != NULL && 400 inner_classes() != Universe::the_empty_short_array() && 401 !inner_classes()->is_shared()) { 402 MetadataFactory::free_array<jushort>(loader_data, inner_classes()); 403 } 404 set_inner_classes(NULL); 405 406 if (value_types() != NULL && !value_types()->is_shared()) { 407 MetadataFactory::free_array<ValueTypes>(loader_data, value_types()); 408 } 409 set_value_types(NULL); 410 411 // We should deallocate the Annotations instance if it's not in shared spaces. 412 if (annotations() != NULL && !annotations()->is_shared()) { 413 MetadataFactory::free_metadata(loader_data, annotations()); 414 } 415 set_annotations(NULL); 416 } 417 418 bool InstanceKlass::should_be_initialized() const { 419 return !is_initialized(); 420 } 421 422 klassItable InstanceKlass::itable() const { 423 return klassItable(const_cast<InstanceKlass*>(this)); 424 } 425 426 void InstanceKlass::eager_initialize(Thread *thread) { 427 if (!EagerInitialization) return; 428 429 if (this->is_not_initialized()) { 430 // abort if the the class has a class initializer 431 if (this->class_initializer() != NULL) return; 432 433 // abort if it is java.lang.Object (initialization is handled in genesis) 434 Klass* super_klass = super(); 435 if (super_klass == NULL) return; 436 437 // abort if the super class should be initialized 438 if (!InstanceKlass::cast(super_klass)->is_initialized()) return; 439 440 // call body to expose the this pointer 441 eager_initialize_impl(); 442 } 443 } 444 445 // JVMTI spec thinks there are signers and protection domain in the 446 // instanceKlass. These accessors pretend these fields are there. 447 // The hprof specification also thinks these fields are in InstanceKlass. 
448 oop InstanceKlass::protection_domain() const { 449 // return the protection_domain from the mirror 450 return java_lang_Class::protection_domain(java_mirror()); 451 } 452 453 // To remove these from requires an incompatible change and CCC request. 454 objArrayOop InstanceKlass::signers() const { 455 // return the signers from the mirror 456 return java_lang_Class::signers(java_mirror()); 457 } 458 459 oop InstanceKlass::init_lock() const { 460 // return the init lock from the mirror 461 oop lock = java_lang_Class::init_lock(java_mirror()); 462 // Prevent reordering with any access of initialization state 463 OrderAccess::loadload(); 464 assert((oop)lock != NULL || !is_not_initialized(), // initialized or in_error state 465 "only fully initialized state can have a null lock"); 466 return lock; 467 } 468 469 // Set the initialization lock to null so the object can be GC'ed. Any racing 470 // threads to get this lock will see a null lock and will not lock. 471 // That's okay because they all check for initialized state after getting 472 // the lock and return. 473 void InstanceKlass::fence_and_clear_init_lock() { 474 // make sure previous stores are all done, notably the init_state. 475 OrderAccess::storestore(); 476 java_lang_Class::set_init_lock(java_mirror(), NULL); 477 assert(!is_not_initialized(), "class must be initialized now"); 478 } 479 480 void InstanceKlass::eager_initialize_impl() { 481 EXCEPTION_MARK; 482 HandleMark hm(THREAD); 483 Handle h_init_lock(THREAD, init_lock()); 484 ObjectLocker ol(h_init_lock, THREAD, h_init_lock() != NULL); 485 486 // abort if someone beat us to the initialization 487 if (!is_not_initialized()) return; // note: not equivalent to is_initialized() 488 489 ClassState old_state = init_state(); 490 link_class_impl(true, THREAD); 491 if (HAS_PENDING_EXCEPTION) { 492 CLEAR_PENDING_EXCEPTION; 493 // Abort if linking the class throws an exception. 494 495 // Use a test to avoid redundantly resetting the state if there's 496 // no change. Set_init_state() asserts that state changes make 497 // progress, whereas here we might just be spinning in place. 498 if (old_state != _init_state) 499 set_init_state(old_state); 500 } else { 501 // linking successfull, mark class as initialized 502 set_init_state(fully_initialized); 503 fence_and_clear_init_lock(); 504 // trace 505 if (log_is_enabled(Info, class, init)) { 506 ResourceMark rm(THREAD); 507 log_info(class, init)("[Initialized %s without side effects]", external_name()); 508 } 509 } 510 } 511 512 513 // See "The Virtual Machine Specification" section 2.16.5 for a detailed explanation of the class initialization 514 // process. The step comments refers to the procedure described in that section. 515 // Note: implementation moved to static method to expose the this pointer. 516 void InstanceKlass::initialize(TRAPS) { 517 if (this->should_be_initialized()) { 518 initialize_impl(CHECK); 519 // Note: at this point the class may be initialized 520 // OR it may be in the state of being initialized 521 // in case of recursive initialization! 522 } else { 523 assert(is_initialized(), "sanity check"); 524 } 525 } 526 527 528 bool InstanceKlass::verify_code(bool throw_verifyerror, TRAPS) { 529 // 1) Verify the bytecodes 530 Verifier::Mode mode = 531 throw_verifyerror ? 
Verifier::ThrowException : Verifier::NoException; 532 return Verifier::verify(this, mode, should_verify_class(), THREAD); 533 } 534 535 536 // Used exclusively by the shared spaces dump mechanism to prevent 537 // classes mapped into the shared regions in new VMs from appearing linked. 538 539 void InstanceKlass::unlink_class() { 540 assert(is_linked(), "must be linked"); 541 _init_state = loaded; 542 } 543 544 void InstanceKlass::link_class(TRAPS) { 545 assert(is_loaded(), "must be loaded"); 546 if (!is_linked()) { 547 link_class_impl(true, CHECK); 548 } 549 } 550 551 // Called to verify that a class can link during initialization, without 552 // throwing a VerifyError. 553 bool InstanceKlass::link_class_or_fail(TRAPS) { 554 assert(is_loaded(), "must be loaded"); 555 if (!is_linked()) { 556 link_class_impl(false, CHECK_false); 557 } 558 return is_linked(); 559 } 560 561 bool InstanceKlass::link_class_impl(bool throw_verifyerror, TRAPS) { 562 if (DumpSharedSpaces && is_in_error_state()) { 563 // This is for CDS dumping phase only -- we use the in_error_state to indicate that 564 // the class has failed verification. Throwing the NoClassDefFoundError here is just 565 // a convenient way to stop repeat attempts to verify the same (bad) class. 566 // 567 // Note that the NoClassDefFoundError is not part of the JLS, and should not be thrown 568 // if we are executing Java code. This is not a problem for CDS dumping phase since 569 // it doesn't execute any Java code. 570 ResourceMark rm(THREAD); 571 Exceptions::fthrow(THREAD_AND_LOCATION, 572 vmSymbols::java_lang_NoClassDefFoundError(), 573 "Class %s, or one of its supertypes, failed class initialization", 574 external_name()); 575 return false; 576 } 577 // return if already verified 578 if (is_linked()) { 579 return true; 580 } 581 582 // Timing 583 // timer handles recursion 584 assert(THREAD->is_Java_thread(), "non-JavaThread in link_class_impl"); 585 JavaThread* jt = (JavaThread*)THREAD; 586 587 // link super class before linking this class 588 Klass* super_klass = super(); 589 if (super_klass != NULL) { 590 if (super_klass->is_interface()) { // check if super class is an interface 591 ResourceMark rm(THREAD); 592 Exceptions::fthrow( 593 THREAD_AND_LOCATION, 594 vmSymbols::java_lang_IncompatibleClassChangeError(), 595 "class %s has interface %s as super class", 596 external_name(), 597 super_klass->external_name() 598 ); 599 return false; 600 } 601 602 InstanceKlass* ik_super = InstanceKlass::cast(super_klass); 603 ik_super->link_class_impl(throw_verifyerror, CHECK_false); 604 } 605 606 // link all interfaces implemented by this class before linking this class 607 Array<Klass*>* interfaces = local_interfaces(); 608 int num_interfaces = interfaces->length(); 609 for (int index = 0; index < num_interfaces; index++) { 610 InstanceKlass* interk = InstanceKlass::cast(interfaces->at(index)); 611 interk->link_class_impl(throw_verifyerror, CHECK_false); 612 } 613 614 // If a value type is referenced by a class (either as a field type or a 615 // method argument or return type) this value type must be loaded during 616 // the linking of this class because size and properties of the value type 617 // must be known in order to be able to perform value type optimizations 618 619 // Note: circular dependencies between value types are not handled yet 620 621 // Note: one case is not handled yet: arrays of value types => FixMe 622 623 // Note: the current implementation is not optimized because the search for 624 // value types is performed on all 
classes. It would be more efficient to 625 // detect value types during verification and 'tag' the classes for which 626 // value type loading is required. However, this optimization won't be 627 // applicable to classes that are not verified 628 629 // First step: fields 630 for (JavaFieldStream fs(this); !fs.done(); fs.next()) { 631 ResourceMark rm(THREAD); 632 if (fs.field_descriptor().access_flags().is_flattenable()) { 633 Symbol* signature = fs.field_descriptor().signature(); 634 ResourceMark rm; 635 Symbol* name = SymbolTable::lookup(signature->as_C_string() + 1, 636 signature->utf8_length() - 2, CHECK_false); 637 assert(this->is_declared_value_type(name), "Verifying consistency with ValueTypes attribute"); 638 name->decrement_refcount(); 639 name = NULL; 640 // Get current loader and protection domain first. 641 oop loader = class_loader(); 642 oop prot_domain = protection_domain(); 643 Klass* klass = SystemDictionary::resolve_or_fail(signature, 644 Handle(THREAD, loader), Handle(THREAD, prot_domain), true, 645 CHECK_false); 646 if (klass == NULL) { 647 THROW_(vmSymbols::java_lang_LinkageError(), false); 648 } 649 } 650 } 651 652 // Second step: methods arguments and return types 653 for (int i = 0; i < constants()->length(); i++) { 654 if (constants()->tag_at(i).is_method()) { 655 Symbol* signature = constants()->uncached_signature_ref_at(i); 656 ResourceMark rm(THREAD); 657 for (SignatureStream ss(signature); !ss.is_done(); ss.next()) { 658 Symbol* sig = ss.as_symbol(THREAD); 659 if (is_declared_value_type(sig)) { 660 // Get current loader and protection domain first. 661 oop loader = class_loader(); 662 oop protection_domain = this->protection_domain(); 663 664 bool ok = SystemDictionary::resolve_or_fail(sig, 665 Handle(THREAD, loader), Handle(THREAD, protection_domain), true, 666 CHECK_false); 667 if (!ok) { 668 THROW_(vmSymbols::java_lang_LinkageError(), false); 669 } 670 } 671 } 672 } 673 } 674 675 // in case the class is linked in the process of linking its superclasses 676 if (is_linked()) { 677 return true; 678 } 679 680 // trace only the link time for this klass that includes 681 // the verification time 682 PerfClassTraceTime vmtimer(ClassLoader::perf_class_link_time(), 683 ClassLoader::perf_class_link_selftime(), 684 ClassLoader::perf_classes_linked(), 685 jt->get_thread_stat()->perf_recursion_counts_addr(), 686 jt->get_thread_stat()->perf_timers_addr(), 687 PerfClassTraceTime::CLASS_LINK); 688 689 // verification & rewriting 690 { 691 HandleMark hm(THREAD); 692 Handle h_init_lock(THREAD, init_lock()); 693 ObjectLocker ol(h_init_lock, THREAD, h_init_lock() != NULL); 694 // rewritten will have been set if loader constraint error found 695 // on an earlier link attempt 696 // don't verify or rewrite if already rewritten 697 // 698 699 if (!is_linked()) { 700 if (!is_rewritten()) { 701 { 702 bool verify_ok = verify_code(throw_verifyerror, THREAD); 703 if (!verify_ok) { 704 return false; 705 } 706 } 707 708 // Just in case a side-effect of verify linked this class already 709 // (which can sometimes happen since the verifier loads classes 710 // using custom class loaders, which are free to initialize things) 711 if (is_linked()) { 712 return true; 713 } 714 715 // also sets rewritten 716 rewrite_class(CHECK_false); 717 } else if (is_shared()) { 718 SystemDictionaryShared::check_verification_constraints(this, CHECK_false); 719 } 720 721 // relocate jsrs and link methods after they are all rewritten 722 link_methods(CHECK_false); 723 724 // Initialize the vtable and interface 
table after 725 // methods have been rewritten since rewrite may 726 // fabricate new Method*s. 727 // also does loader constraint checking 728 // 729 // initialize_vtable and initialize_itable need to be rerun for 730 // a shared class if the class is not loaded by the NULL classloader. 731 ClassLoaderData * loader_data = class_loader_data(); 732 if (!(is_shared() && 733 loader_data->is_the_null_class_loader_data())) { 734 ResourceMark rm(THREAD); 735 vtable().initialize_vtable(true, CHECK_false); 736 itable().initialize_itable(true, CHECK_false); 737 } 738 #ifdef ASSERT 739 else { 740 vtable().verify(tty, true); 741 // In case itable verification is ever added. 742 // itable().verify(tty, true); 743 } 744 #endif 745 746 set_init_state(linked); 747 if (JvmtiExport::should_post_class_prepare()) { 748 Thread *thread = THREAD; 749 assert(thread->is_Java_thread(), "thread->is_Java_thread()"); 750 JvmtiExport::post_class_prepare((JavaThread *) thread, this); 751 } 752 } 753 } 754 return true; 755 } 756 757 758 // Rewrite the byte codes of all of the methods of a class. 759 // The rewriter must be called exactly once. Rewriting must happen after 760 // verification but before the first method of the class is executed. 761 void InstanceKlass::rewrite_class(TRAPS) { 762 assert(is_loaded(), "must be loaded"); 763 if (is_rewritten()) { 764 assert(is_shared(), "rewriting an unshared class?"); 765 return; 766 } 767 Rewriter::rewrite(this, CHECK); 768 set_rewritten(); 769 } 770 771 // Now relocate and link method entry points after class is rewritten. 772 // This is outside is_rewritten flag. In case of an exception, it can be 773 // executed more than once. 774 void InstanceKlass::link_methods(TRAPS) { 775 int len = methods()->length(); 776 for (int i = len-1; i >= 0; i--) { 777 methodHandle m(THREAD, methods()->at(i)); 778 779 // Set up method entry points for compiler and interpreter . 780 m->link_method(m, CHECK); 781 } 782 } 783 784 // Eagerly initialize superinterfaces that declare default methods (concrete instance: any access) 785 void InstanceKlass::initialize_super_interfaces(TRAPS) { 786 assert (has_nonstatic_concrete_methods(), "caller should have checked this"); 787 for (int i = 0; i < local_interfaces()->length(); ++i) { 788 Klass* iface = local_interfaces()->at(i); 789 InstanceKlass* ik = InstanceKlass::cast(iface); 790 791 // Initialization is depth first search ie. we start with top of the inheritance tree 792 // has_nonstatic_concrete_methods drives searching superinterfaces since it 793 // means has_nonstatic_concrete_methods in its superinterface hierarchy 794 if (ik->has_nonstatic_concrete_methods()) { 795 ik->initialize_super_interfaces(CHECK); 796 } 797 798 // Only initialize() interfaces that "declare" concrete methods. 799 if (ik->should_be_initialized() && ik->declares_nonstatic_concrete_methods()) { 800 ik->initialize(CHECK); 801 } 802 } 803 } 804 805 void InstanceKlass::initialize_impl(TRAPS) { 806 HandleMark hm(THREAD); 807 808 // Make sure klass is linked (verified) before initialization 809 // A class could already be verified, since it has been reflected upon. 
810 link_class(CHECK); 811 812 DTRACE_CLASSINIT_PROBE(required, -1); 813 814 bool wait = false; 815 816 // refer to the JVM book page 47 for description of steps 817 // Step 1 818 { 819 Handle h_init_lock(THREAD, init_lock()); 820 ObjectLocker ol(h_init_lock, THREAD, h_init_lock() != NULL); 821 822 Thread *self = THREAD; // it's passed the current thread 823 824 // Step 2 825 // If we were to use wait() instead of waitInterruptibly() then 826 // we might end up throwing IE from link/symbol resolution sites 827 // that aren't expected to throw. This would wreak havoc. See 6320309. 828 while(is_being_initialized() && !is_reentrant_initialization(self)) { 829 wait = true; 830 ol.waitUninterruptibly(CHECK); 831 } 832 833 // Step 3 834 if (is_being_initialized() && is_reentrant_initialization(self)) { 835 DTRACE_CLASSINIT_PROBE_WAIT(recursive, -1, wait); 836 return; 837 } 838 839 // Step 4 840 if (is_initialized()) { 841 DTRACE_CLASSINIT_PROBE_WAIT(concurrent, -1, wait); 842 return; 843 } 844 845 // Step 5 846 if (is_in_error_state()) { 847 DTRACE_CLASSINIT_PROBE_WAIT(erroneous, -1, wait); 848 ResourceMark rm(THREAD); 849 const char* desc = "Could not initialize class "; 850 const char* className = external_name(); 851 size_t msglen = strlen(desc) + strlen(className) + 1; 852 char* message = NEW_RESOURCE_ARRAY(char, msglen); 853 if (NULL == message) { 854 // Out of memory: can't create detailed error message 855 THROW_MSG(vmSymbols::java_lang_NoClassDefFoundError(), className); 856 } else { 857 jio_snprintf(message, msglen, "%s%s", desc, className); 858 THROW_MSG(vmSymbols::java_lang_NoClassDefFoundError(), message); 859 } 860 } 861 862 // Step 6 863 set_init_state(being_initialized); 864 set_init_thread(self); 865 } 866 867 // Step 7 868 // Next, if C is a class rather than an interface, initialize it's super class and super 869 // interfaces. 870 if (!is_interface()) { 871 Klass* super_klass = super(); 872 if (super_klass != NULL && super_klass->should_be_initialized()) { 873 super_klass->initialize(THREAD); 874 } 875 // If C implements any interface that declares a non-static, concrete method, 876 // the initialization of C triggers initialization of its super interfaces. 877 // Only need to recurse if has_nonstatic_concrete_methods which includes declaring and 878 // having a superinterface that declares, non-static, concrete methods 879 if (!HAS_PENDING_EXCEPTION && has_nonstatic_concrete_methods()) { 880 initialize_super_interfaces(THREAD); 881 } 882 883 // If any exceptions, complete abruptly, throwing the same exception as above. 884 if (HAS_PENDING_EXCEPTION) { 885 Handle e(THREAD, PENDING_EXCEPTION); 886 CLEAR_PENDING_EXCEPTION; 887 { 888 EXCEPTION_MARK; 889 // Locks object, set state, and notify all waiting threads 890 set_initialization_state_and_notify(initialization_error, THREAD); 891 CLEAR_PENDING_EXCEPTION; 892 } 893 DTRACE_CLASSINIT_PROBE_WAIT(super__failed, -1, wait); 894 THROW_OOP(e()); 895 } 896 } 897 898 // Step 8 899 // Initialize classes of flattenable fields 900 { 901 for (AllFieldStream fs(this); !fs.done(); fs.next()) { 902 if (fs.is_flattenable()) { 903 InstanceKlass* field_klass = InstanceKlass::cast(this->get_value_field_klass(fs.index())); 904 field_klass->initialize(CHECK); 905 } 906 } 907 } 908 909 910 // Look for aot compiled methods for this klass, including class initializer. 
911 AOTLoader::load_for_klass(this, THREAD); 912 913 // Step 9 914 { 915 assert(THREAD->is_Java_thread(), "non-JavaThread in initialize_impl"); 916 JavaThread* jt = (JavaThread*)THREAD; 917 DTRACE_CLASSINIT_PROBE_WAIT(clinit, -1, wait); 918 // Timer includes any side effects of class initialization (resolution, 919 // etc), but not recursive entry into call_class_initializer(). 920 PerfClassTraceTime timer(ClassLoader::perf_class_init_time(), 921 ClassLoader::perf_class_init_selftime(), 922 ClassLoader::perf_classes_inited(), 923 jt->get_thread_stat()->perf_recursion_counts_addr(), 924 jt->get_thread_stat()->perf_timers_addr(), 925 PerfClassTraceTime::CLASS_CLINIT); 926 call_class_initializer(THREAD); 927 } 928 929 // Step 10 930 if (!HAS_PENDING_EXCEPTION) { 931 set_initialization_state_and_notify(fully_initialized, CHECK); 932 { 933 debug_only(vtable().verify(tty, true);) 934 } 935 } 936 else { 937 // Step 11 and 12 938 Handle e(THREAD, PENDING_EXCEPTION); 939 CLEAR_PENDING_EXCEPTION; 940 // JVMTI has already reported the pending exception 941 // JVMTI internal flag reset is needed in order to report ExceptionInInitializerError 942 JvmtiExport::clear_detected_exception((JavaThread*)THREAD); 943 { 944 EXCEPTION_MARK; 945 set_initialization_state_and_notify(initialization_error, THREAD); 946 CLEAR_PENDING_EXCEPTION; // ignore any exception thrown, class initialization error is thrown below 947 // JVMTI has already reported the pending exception 948 // JVMTI internal flag reset is needed in order to report ExceptionInInitializerError 949 JvmtiExport::clear_detected_exception((JavaThread*)THREAD); 950 } 951 DTRACE_CLASSINIT_PROBE_WAIT(error, -1, wait); 952 if (e->is_a(SystemDictionary::Error_klass())) { 953 THROW_OOP(e()); 954 } else { 955 JavaCallArguments args(e); 956 THROW_ARG(vmSymbols::java_lang_ExceptionInInitializerError(), 957 vmSymbols::throwable_void_signature(), 958 &args); 959 } 960 } 961 DTRACE_CLASSINIT_PROBE_WAIT(end, -1, wait); 962 } 963 964 965 void InstanceKlass::set_initialization_state_and_notify(ClassState state, TRAPS) { 966 Handle h_init_lock(THREAD, init_lock()); 967 if (h_init_lock() != NULL) { 968 ObjectLocker ol(h_init_lock, THREAD); 969 set_init_state(state); 970 fence_and_clear_init_lock(); 971 ol.notify_all(CHECK); 972 } else { 973 assert(h_init_lock() != NULL, "The initialization state should never be set twice"); 974 set_init_state(state); 975 } 976 } 977 978 // The embedded _implementor field can only record one implementor. 979 // When there are more than one implementors, the _implementor field 980 // is set to the interface Klass* itself. Following are the possible 981 // values for the _implementor field: 982 // NULL - no implementor 983 // implementor Klass* - one implementor 984 // self - more than one implementor 985 // 986 // The _implementor field only exists for interfaces. 987 void InstanceKlass::add_implementor(Klass* k) { 988 assert(Compile_lock->owned_by_self(), ""); 989 assert(is_interface(), "not interface"); 990 // Filter out my subinterfaces. 991 // (Note: Interfaces are never on the subklass list.) 992 if (InstanceKlass::cast(k)->is_interface()) return; 993 994 // Filter out subclasses whose supers already implement me. 995 // (Note: CHA must walk subclasses of direct implementors 996 // in order to locate indirect implementors.) 
997 Klass* sk = k->super(); 998 if (sk != NULL && InstanceKlass::cast(sk)->implements_interface(this)) 999 // We only need to check one immediate superclass, since the 1000 // implements_interface query looks at transitive_interfaces. 1001 // Any supers of the super have the same (or fewer) transitive_interfaces. 1002 return; 1003 1004 Klass* ik = implementor(); 1005 if (ik == NULL) { 1006 set_implementor(k); 1007 } else if (ik != this) { 1008 // There is already an implementor. Use itself as an indicator of 1009 // more than one implementors. 1010 set_implementor(this); 1011 } 1012 1013 // The implementor also implements the transitive_interfaces 1014 for (int index = 0; index < local_interfaces()->length(); index++) { 1015 InstanceKlass::cast(local_interfaces()->at(index))->add_implementor(k); 1016 } 1017 } 1018 1019 void InstanceKlass::init_implementor() { 1020 if (is_interface()) { 1021 set_implementor(NULL); 1022 } 1023 } 1024 1025 1026 void InstanceKlass::process_interfaces(Thread *thread) { 1027 // link this class into the implementors list of every interface it implements 1028 for (int i = local_interfaces()->length() - 1; i >= 0; i--) { 1029 assert(local_interfaces()->at(i)->is_klass(), "must be a klass"); 1030 InstanceKlass* interf = InstanceKlass::cast(local_interfaces()->at(i)); 1031 assert(interf->is_interface(), "expected interface"); 1032 interf->add_implementor(this); 1033 } 1034 } 1035 1036 bool InstanceKlass::can_be_primary_super_slow() const { 1037 if (is_interface()) 1038 return false; 1039 else 1040 return Klass::can_be_primary_super_slow(); 1041 } 1042 1043 GrowableArray<Klass*>* InstanceKlass::compute_secondary_supers(int num_extra_slots) { 1044 // The secondaries are the implemented interfaces. 1045 Array<Klass*>* interfaces = transitive_interfaces(); 1046 int num_secondaries = num_extra_slots + interfaces->length(); 1047 if (num_secondaries == 0) { 1048 // Must share this for correct bootstrapping! 1049 set_secondary_supers(Universe::the_empty_klass_array()); 1050 return NULL; 1051 } else if (num_extra_slots == 0) { 1052 // The secondary super list is exactly the same as the transitive interfaces. 1053 // Redefine classes has to be careful not to delete this! 1054 set_secondary_supers(interfaces); 1055 return NULL; 1056 } else { 1057 // Copy transitive interfaces to a temporary growable array to be constructed 1058 // into the secondary super list with extra slots. 
1059 GrowableArray<Klass*>* secondaries = new GrowableArray<Klass*>(interfaces->length()); 1060 for (int i = 0; i < interfaces->length(); i++) { 1061 secondaries->push(interfaces->at(i)); 1062 } 1063 return secondaries; 1064 } 1065 } 1066 1067 bool InstanceKlass::compute_is_subtype_of(Klass* k) { 1068 if (k->is_interface()) { 1069 return implements_interface(k); 1070 } else { 1071 return Klass::compute_is_subtype_of(k); 1072 } 1073 } 1074 1075 bool InstanceKlass::implements_interface(Klass* k) const { 1076 if (this == k) return true; 1077 assert(k->is_interface(), "should be an interface class"); 1078 for (int i = 0; i < transitive_interfaces()->length(); i++) { 1079 if (transitive_interfaces()->at(i) == k) { 1080 return true; 1081 } 1082 } 1083 return false; 1084 } 1085 1086 bool InstanceKlass::is_same_or_direct_interface(Klass *k) const { 1087 // Verify direct super interface 1088 if (this == k) return true; 1089 assert(k->is_interface(), "should be an interface class"); 1090 for (int i = 0; i < local_interfaces()->length(); i++) { 1091 if (local_interfaces()->at(i) == k) { 1092 return true; 1093 } 1094 } 1095 return false; 1096 } 1097 1098 objArrayOop InstanceKlass::allocate_objArray(int n, int length, TRAPS) { 1099 if (length < 0) THROW_0(vmSymbols::java_lang_NegativeArraySizeException()); 1100 if (length > arrayOopDesc::max_array_length(T_OBJECT)) { 1101 report_java_out_of_memory("Requested array size exceeds VM limit"); 1102 JvmtiExport::post_array_size_exhausted(); 1103 THROW_OOP_0(Universe::out_of_memory_error_array_size()); 1104 } 1105 int size = objArrayOopDesc::object_size(length); 1106 Klass* ak = array_klass(n, CHECK_NULL); 1107 objArrayOop o = 1108 (objArrayOop)CollectedHeap::array_allocate(ak, size, length, CHECK_NULL); 1109 return o; 1110 } 1111 1112 instanceOop InstanceKlass::register_finalizer(instanceOop i, TRAPS) { 1113 if (TraceFinalizerRegistration) { 1114 tty->print("Registered "); 1115 i->print_value_on(tty); 1116 tty->print_cr(" (" INTPTR_FORMAT ") as finalizable", p2i(i)); 1117 } 1118 instanceHandle h_i(THREAD, i); 1119 // Pass the handle as argument, JavaCalls::call expects oop as jobjects 1120 JavaValue result(T_VOID); 1121 JavaCallArguments args(h_i); 1122 methodHandle mh (THREAD, Universe::finalizer_register_method()); 1123 JavaCalls::call(&result, mh, &args, CHECK_NULL); 1124 return h_i(); 1125 } 1126 1127 instanceOop InstanceKlass::allocate_instance(TRAPS) { 1128 bool has_finalizer_flag = has_finalizer(); // Query before possible GC 1129 int size = size_helper(); // Query before forming handle. 1130 1131 instanceOop i; 1132 1133 i = (instanceOop)CollectedHeap::obj_allocate(this, size, CHECK_NULL); 1134 if (has_finalizer_flag && !RegisterFinalizersAtInit) { 1135 i = register_finalizer(i, CHECK_NULL); 1136 } 1137 return i; 1138 } 1139 1140 void InstanceKlass::check_valid_for_instantiation(bool throwError, TRAPS) { 1141 if (is_interface() || is_abstract()) { 1142 ResourceMark rm(THREAD); 1143 THROW_MSG(throwError ? vmSymbols::java_lang_InstantiationError() 1144 : vmSymbols::java_lang_InstantiationException(), external_name()); 1145 } 1146 if (this == SystemDictionary::Class_klass()) { 1147 ResourceMark rm(THREAD); 1148 THROW_MSG(throwError ? 
vmSymbols::java_lang_IllegalAccessError() 1149 : vmSymbols::java_lang_IllegalAccessException(), external_name()); 1150 } 1151 } 1152 1153 Klass* InstanceKlass::array_klass_impl(bool or_null, int n, TRAPS) { 1154 // Need load-acquire for lock-free read 1155 if (array_klasses_acquire() == NULL) { 1156 if (or_null) return NULL; 1157 1158 ResourceMark rm; 1159 JavaThread *jt = (JavaThread *)THREAD; 1160 { 1161 // Atomic creation of array_klasses 1162 MutexLocker mc(Compile_lock, THREAD); // for vtables 1163 MutexLocker ma(MultiArray_lock, THREAD); 1164 1165 // Check if update has already taken place 1166 if (array_klasses() == NULL) { 1167 Klass* k = ObjArrayKlass::allocate_objArray_klass(class_loader_data(), 1, this, CHECK_NULL); 1168 // use 'release' to pair with lock-free load 1169 release_set_array_klasses(k); 1170 } 1171 } 1172 } 1173 // _this will always be set at this point 1174 ObjArrayKlass* oak = (ObjArrayKlass*)array_klasses(); 1175 if (or_null) { 1176 return oak->array_klass_or_null(n); 1177 } 1178 return oak->array_klass(n, THREAD); 1179 } 1180 1181 Klass* InstanceKlass::array_klass_impl(bool or_null, TRAPS) { 1182 return array_klass_impl(or_null, 1, THREAD); 1183 } 1184 1185 static int call_class_initializer_counter = 0; // for debugging 1186 1187 Method* InstanceKlass::class_initializer() const { 1188 Method* clinit = find_method( 1189 vmSymbols::class_initializer_name(), vmSymbols::void_method_signature()); 1190 if (clinit != NULL && clinit->has_valid_initializer_flags()) { 1191 return clinit; 1192 } 1193 return NULL; 1194 } 1195 1196 void InstanceKlass::call_class_initializer(TRAPS) { 1197 if (ReplayCompiles && 1198 (ReplaySuppressInitializers == 1 || 1199 (ReplaySuppressInitializers >= 2 && class_loader() != NULL))) { 1200 // Hide the existence of the initializer for the purpose of replaying the compile 1201 return; 1202 } 1203 1204 methodHandle h_method(THREAD, class_initializer()); 1205 assert(!is_initialized(), "we cannot initialize twice"); 1206 LogTarget(Info, class, init) lt; 1207 if (lt.is_enabled()) { 1208 ResourceMark rm; 1209 LogStream ls(lt); 1210 ls.print("%d Initializing ", call_class_initializer_counter++); 1211 name()->print_value_on(&ls); 1212 ls.print_cr("%s (" INTPTR_FORMAT ")", h_method() == NULL ? "(no method)" : "", p2i(this)); 1213 } 1214 if (h_method() != NULL) { 1215 JavaCallArguments args; // No arguments 1216 JavaValue result(T_VOID); 1217 JavaCalls::call(&result, h_method, &args, CHECK); // Static call (no args) 1218 } 1219 } 1220 1221 1222 void InstanceKlass::mask_for(const methodHandle& method, int bci, 1223 InterpreterOopMap* entry_for) { 1224 // Lazily create the _oop_map_cache at first request 1225 // Lock-free access requires load_acquire. 1226 OopMapCache* oop_map_cache = OrderAccess::load_acquire(&_oop_map_cache); 1227 if (oop_map_cache == NULL) { 1228 MutexLockerEx x(OopMapCacheAlloc_lock, Mutex::_no_safepoint_check_flag); 1229 // Check if _oop_map_cache was allocated while we were waiting for this lock 1230 if ((oop_map_cache = _oop_map_cache) == NULL) { 1231 oop_map_cache = new OopMapCache(); 1232 // Ensure _oop_map_cache is stable, since it is examined without a lock 1233 OrderAccess::release_store(&_oop_map_cache, oop_map_cache); 1234 } 1235 } 1236 // _oop_map_cache is constant after init; lookup below does its own locking. 
1237 oop_map_cache->lookup(method, bci, entry_for); 1238 } 1239 1240 1241 bool InstanceKlass::find_local_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const { 1242 for (JavaFieldStream fs(this); !fs.done(); fs.next()) { 1243 Symbol* f_name = fs.name(); 1244 Symbol* f_sig = fs.signature(); 1245 if (f_name == name && f_sig == sig) { 1246 fd->reinitialize(const_cast<InstanceKlass*>(this), fs.index()); 1247 return true; 1248 } 1249 } 1250 return false; 1251 } 1252 1253 1254 Klass* InstanceKlass::find_interface_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const { 1255 const int n = local_interfaces()->length(); 1256 for (int i = 0; i < n; i++) { 1257 Klass* intf1 = local_interfaces()->at(i); 1258 assert(intf1->is_interface(), "just checking type"); 1259 // search for field in current interface 1260 if (InstanceKlass::cast(intf1)->find_local_field(name, sig, fd)) { 1261 assert(fd->is_static(), "interface field must be static"); 1262 return intf1; 1263 } 1264 // search for field in direct superinterfaces 1265 Klass* intf2 = InstanceKlass::cast(intf1)->find_interface_field(name, sig, fd); 1266 if (intf2 != NULL) return intf2; 1267 } 1268 // otherwise field lookup fails 1269 return NULL; 1270 } 1271 1272 1273 Klass* InstanceKlass::find_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const { 1274 // search order according to newest JVM spec (5.4.3.2, p.167). 1275 // 1) search for field in current klass 1276 if (find_local_field(name, sig, fd)) { 1277 return const_cast<InstanceKlass*>(this); 1278 } 1279 // 2) search for field recursively in direct superinterfaces 1280 { Klass* intf = find_interface_field(name, sig, fd); 1281 if (intf != NULL) return intf; 1282 } 1283 // 3) apply field lookup recursively if superclass exists 1284 { Klass* supr = super(); 1285 if (supr != NULL) return InstanceKlass::cast(supr)->find_field(name, sig, fd); 1286 } 1287 // 4) otherwise field lookup fails 1288 return NULL; 1289 } 1290 1291 1292 Klass* InstanceKlass::find_field(Symbol* name, Symbol* sig, bool is_static, fieldDescriptor* fd) const { 1293 // search order according to newest JVM spec (5.4.3.2, p.167). 
1294 // 1) search for field in current klass 1295 if (find_local_field(name, sig, fd)) { 1296 if (fd->is_static() == is_static) return const_cast<InstanceKlass*>(this); 1297 } 1298 // 2) search for field recursively in direct superinterfaces 1299 if (is_static) { 1300 Klass* intf = find_interface_field(name, sig, fd); 1301 if (intf != NULL) return intf; 1302 } 1303 // 3) apply field lookup recursively if superclass exists 1304 { Klass* supr = super(); 1305 if (supr != NULL) return InstanceKlass::cast(supr)->find_field(name, sig, is_static, fd); 1306 } 1307 // 4) otherwise field lookup fails 1308 return NULL; 1309 } 1310 1311 1312 bool InstanceKlass::find_local_field_from_offset(int offset, bool is_static, fieldDescriptor* fd) const { 1313 for (JavaFieldStream fs(this); !fs.done(); fs.next()) { 1314 if (fs.offset() == offset) { 1315 fd->reinitialize(const_cast<InstanceKlass*>(this), fs.index()); 1316 if (fd->is_static() == is_static) return true; 1317 } 1318 } 1319 return false; 1320 } 1321 1322 1323 bool InstanceKlass::find_field_from_offset(int offset, bool is_static, fieldDescriptor* fd) const { 1324 Klass* klass = const_cast<InstanceKlass*>(this); 1325 while (klass != NULL) { 1326 if (InstanceKlass::cast(klass)->find_local_field_from_offset(offset, is_static, fd)) { 1327 return true; 1328 } 1329 klass = klass->super(); 1330 } 1331 return false; 1332 } 1333 1334 1335 void InstanceKlass::methods_do(void f(Method* method)) { 1336 // Methods aren't stable until they are loaded. This can be read outside 1337 // a lock through the ClassLoaderData for profiling 1338 if (!is_loaded()) { 1339 return; 1340 } 1341 1342 int len = methods()->length(); 1343 for (int index = 0; index < len; index++) { 1344 Method* m = methods()->at(index); 1345 assert(m->is_method(), "must be method"); 1346 f(m); 1347 } 1348 } 1349 1350 1351 void InstanceKlass::do_local_static_fields(FieldClosure* cl) { 1352 for (JavaFieldStream fs(this); !fs.done(); fs.next()) { 1353 if (fs.access_flags().is_static()) { 1354 fieldDescriptor& fd = fs.field_descriptor(); 1355 cl->do_field(&fd); 1356 } 1357 } 1358 } 1359 1360 1361 void InstanceKlass::do_local_static_fields(void f(fieldDescriptor*, Handle, TRAPS), Handle mirror, TRAPS) { 1362 for (JavaFieldStream fs(this); !fs.done(); fs.next()) { 1363 if (fs.access_flags().is_static()) { 1364 fieldDescriptor& fd = fs.field_descriptor(); 1365 f(&fd, mirror, CHECK); 1366 } 1367 } 1368 } 1369 1370 1371 static int compare_fields_by_offset(int* a, int* b) { 1372 return a[0] - b[0]; 1373 } 1374 1375 void InstanceKlass::do_nonstatic_fields(FieldClosure* cl) { 1376 InstanceKlass* super = superklass(); 1377 if (super != NULL) { 1378 super->do_nonstatic_fields(cl); 1379 } 1380 fieldDescriptor fd; 1381 int length = java_fields_count(); 1382 // In DebugInfo nonstatic fields are sorted by offset. 1383 int* fields_sorted = NEW_C_HEAP_ARRAY(int, 2*(length+1), mtClass); 1384 int j = 0; 1385 for (int i = 0; i < length; i += 1) { 1386 fd.reinitialize(this, i); 1387 if (!fd.is_static()) { 1388 fields_sorted[j + 0] = fd.offset(); 1389 fields_sorted[j + 1] = i; 1390 j += 2; 1391 } 1392 } 1393 if (j > 0) { 1394 length = j; 1395 // _sort_Fn is defined in growableArray.hpp. 
1396 qsort(fields_sorted, length/2, 2*sizeof(int), (_sort_Fn)compare_fields_by_offset); 1397 for (int i = 0; i < length; i += 2) { 1398 fd.reinitialize(this, fields_sorted[i + 1]); 1399 assert(!fd.is_static() && fd.offset() == fields_sorted[i], "only nonstatic fields"); 1400 cl->do_field(&fd); 1401 } 1402 } 1403 FREE_C_HEAP_ARRAY(int, fields_sorted); 1404 } 1405 1406 1407 void InstanceKlass::array_klasses_do(void f(Klass* k, TRAPS), TRAPS) { 1408 if (array_klasses() != NULL) 1409 ArrayKlass::cast(array_klasses())->array_klasses_do(f, THREAD); 1410 } 1411 1412 void InstanceKlass::array_klasses_do(void f(Klass* k)) { 1413 if (array_klasses() != NULL) 1414 ArrayKlass::cast(array_klasses())->array_klasses_do(f); 1415 } 1416 1417 #ifdef ASSERT 1418 static int linear_search(const Array<Method*>* methods, 1419 const Symbol* name, 1420 const Symbol* signature) { 1421 const int len = methods->length(); 1422 for (int index = 0; index < len; index++) { 1423 const Method* const m = methods->at(index); 1424 assert(m->is_method(), "must be method"); 1425 if (m->signature() == signature && m->name() == name) { 1426 return index; 1427 } 1428 } 1429 return -1; 1430 } 1431 #endif 1432 1433 static int binary_search(const Array<Method*>* methods, const Symbol* name) { 1434 int len = methods->length(); 1435 // methods are sorted, so do binary search 1436 int l = 0; 1437 int h = len - 1; 1438 while (l <= h) { 1439 int mid = (l + h) >> 1; 1440 Method* m = methods->at(mid); 1441 assert(m->is_method(), "must be method"); 1442 int res = m->name()->fast_compare(name); 1443 if (res == 0) { 1444 return mid; 1445 } else if (res < 0) { 1446 l = mid + 1; 1447 } else { 1448 h = mid - 1; 1449 } 1450 } 1451 return -1; 1452 } 1453 1454 // find_method looks up the name/signature in the local methods array 1455 Method* InstanceKlass::find_method(const Symbol* name, 1456 const Symbol* signature) const { 1457 return find_method_impl(name, signature, find_overpass, find_static, find_private); 1458 } 1459 1460 Method* InstanceKlass::find_method_impl(const Symbol* name, 1461 const Symbol* signature, 1462 OverpassLookupMode overpass_mode, 1463 StaticLookupMode static_mode, 1464 PrivateLookupMode private_mode) const { 1465 return InstanceKlass::find_method_impl(methods(), 1466 name, 1467 signature, 1468 overpass_mode, 1469 static_mode, 1470 private_mode); 1471 } 1472 1473 // find_instance_method looks up the name/signature in the local methods array 1474 // and skips over static methods 1475 Method* InstanceKlass::find_instance_method(const Array<Method*>* methods, 1476 const Symbol* name, 1477 const Symbol* signature) { 1478 Method* const meth = InstanceKlass::find_method_impl(methods, 1479 name, 1480 signature, 1481 find_overpass, 1482 skip_static, 1483 find_private); 1484 assert(((meth == NULL) || !meth->is_static()), 1485 "find_instance_method should have skipped statics"); 1486 return meth; 1487 } 1488 1489 // find_instance_method looks up the name/signature in the local methods array 1490 // and skips over static methods 1491 Method* InstanceKlass::find_instance_method(const Symbol* name, const Symbol* signature) const { 1492 return InstanceKlass::find_instance_method(methods(), name, signature); 1493 } 1494 1495 // Find looks up the name/signature in the local methods array 1496 // and filters on the overpass, static and private flags 1497 // This returns the first one found 1498 // note that the local methods array can have up to one overpass, one static 1499 // and one instance (private or not) with the same name/signature 
1500 Method* InstanceKlass::find_local_method(const Symbol* name, 1501 const Symbol* signature, 1502 OverpassLookupMode overpass_mode, 1503 StaticLookupMode static_mode, 1504 PrivateLookupMode private_mode) const { 1505 return InstanceKlass::find_method_impl(methods(), 1506 name, 1507 signature, 1508 overpass_mode, 1509 static_mode, 1510 private_mode); 1511 } 1512 1513 // Find looks up the name/signature in the local methods array 1514 // and filters on the overpass, static and private flags 1515 // This returns the first one found 1516 // note that the local methods array can have up to one overpass, one static 1517 // and one instance (private or not) with the same name/signature 1518 Method* InstanceKlass::find_local_method(const Array<Method*>* methods, 1519 const Symbol* name, 1520 const Symbol* signature, 1521 OverpassLookupMode overpass_mode, 1522 StaticLookupMode static_mode, 1523 PrivateLookupMode private_mode) { 1524 return InstanceKlass::find_method_impl(methods, 1525 name, 1526 signature, 1527 overpass_mode, 1528 static_mode, 1529 private_mode); 1530 } 1531 1532 Method* InstanceKlass::find_method(const Array<Method*>* methods, 1533 const Symbol* name, 1534 const Symbol* signature) { 1535 return InstanceKlass::find_method_impl(methods, 1536 name, 1537 signature, 1538 find_overpass, 1539 find_static, 1540 find_private); 1541 } 1542 1543 Method* InstanceKlass::find_method_impl(const Array<Method*>* methods, 1544 const Symbol* name, 1545 const Symbol* signature, 1546 OverpassLookupMode overpass_mode, 1547 StaticLookupMode static_mode, 1548 PrivateLookupMode private_mode) { 1549 int hit = find_method_index(methods, name, signature, overpass_mode, static_mode, private_mode); 1550 return hit >= 0 ? methods->at(hit): NULL; 1551 } 1552 1553 // true if method matches signature and conforms to skipping_X conditions. 1554 static bool method_matches(const Method* m, 1555 const Symbol* signature, 1556 bool skipping_overpass, 1557 bool skipping_static, 1558 bool skipping_private) { 1559 return ((m->signature() == signature) && 1560 (!skipping_overpass || !m->is_overpass()) && 1561 (!skipping_static || !m->is_static()) && 1562 (!skipping_private || !m->is_private())); 1563 } 1564 1565 // Used directly for default_methods to find the index into the 1566 // default_vtable_indices, and indirectly by find_method 1567 // find_method_index looks in the local methods array to return the index 1568 // of the matching name/signature. If, overpass methods are being ignored, 1569 // the search continues to find a potential non-overpass match. This capability 1570 // is important during method resolution to prefer a static method, for example, 1571 // over an overpass method. 1572 // There is the possibility in any _method's array to have the same name/signature 1573 // for a static method, an overpass method and a local instance method 1574 // To correctly catch a given method, the search criteria may need 1575 // to explicitly skip the other two. 
For local instance methods, it 1576 // is often necessary to skip private methods 1577 int InstanceKlass::find_method_index(const Array<Method*>* methods, 1578 const Symbol* name, 1579 const Symbol* signature, 1580 OverpassLookupMode overpass_mode, 1581 StaticLookupMode static_mode, 1582 PrivateLookupMode private_mode) { 1583 const bool skipping_overpass = (overpass_mode == skip_overpass); 1584 const bool skipping_static = (static_mode == skip_static); 1585 const bool skipping_private = (private_mode == skip_private); 1586 const int hit = binary_search(methods, name); 1587 if (hit != -1) { 1588 const Method* const m = methods->at(hit); 1589 1590 // Do linear search to find matching signature. First, quick check 1591 // for common case, ignoring overpasses if requested. 1592 if (method_matches(m, signature, skipping_overpass, skipping_static, skipping_private)) { 1593 return hit; 1594 } 1595 1596 // search downwards through overloaded methods 1597 int i; 1598 for (i = hit - 1; i >= 0; --i) { 1599 const Method* const m = methods->at(i); 1600 assert(m->is_method(), "must be method"); 1601 if (m->name() != name) { 1602 break; 1603 } 1604 if (method_matches(m, signature, skipping_overpass, skipping_static, skipping_private)) { 1605 return i; 1606 } 1607 } 1608 // search upwards 1609 for (i = hit + 1; i < methods->length(); ++i) { 1610 const Method* const m = methods->at(i); 1611 assert(m->is_method(), "must be method"); 1612 if (m->name() != name) { 1613 break; 1614 } 1615 if (method_matches(m, signature, skipping_overpass, skipping_static, skipping_private)) { 1616 return i; 1617 } 1618 } 1619 // not found 1620 #ifdef ASSERT 1621 const int index = (skipping_overpass || skipping_static || skipping_private) ? -1 : 1622 linear_search(methods, name, signature); 1623 assert(-1 == index, "binary search should have found entry %d", index); 1624 #endif 1625 } 1626 return -1; 1627 } 1628 1629 int InstanceKlass::find_method_by_name(const Symbol* name, int* end) const { 1630 return find_method_by_name(methods(), name, end); 1631 } 1632 1633 int InstanceKlass::find_method_by_name(const Array<Method*>* methods, 1634 const Symbol* name, 1635 int* end_ptr) { 1636 assert(end_ptr != NULL, "just checking"); 1637 int start = binary_search(methods, name); 1638 int end = start + 1; 1639 if (start != -1) { 1640 while (start - 1 >= 0 && (methods->at(start - 1))->name() == name) --start; 1641 while (end < methods->length() && (methods->at(end))->name() == name) ++end; 1642 *end_ptr = end; 1643 return start; 1644 } 1645 return -1; 1646 } 1647 1648 // uncached_lookup_method searches both the local class methods array and all 1649 // superclasses methods arrays, skipping any overpass methods in superclasses. 
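// The caller's overpass_mode is applied to this class only; lookups in superclasses
// always use skip_overpass (see the loop below).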
1650 Method* InstanceKlass::uncached_lookup_method(const Symbol* name, 1651 const Symbol* signature, 1652 OverpassLookupMode overpass_mode) const { 1653 OverpassLookupMode overpass_local_mode = overpass_mode; 1654 const Klass* klass = this; 1655 while (klass != NULL) { 1656 Method* const method = InstanceKlass::cast(klass)->find_method_impl(name, 1657 signature, 1658 overpass_local_mode, 1659 find_static, 1660 find_private); 1661 if (method != NULL) { 1662 return method; 1663 } 1664 klass = klass->super(); 1665 overpass_local_mode = skip_overpass; // Always ignore overpass methods in superclasses 1666 } 1667 return NULL; 1668 } 1669 1670 #ifdef ASSERT 1671 // search through class hierarchy and return true if this class or 1672 // one of the superclasses was redefined 1673 bool InstanceKlass::has_redefined_this_or_super() const { 1674 const Klass* klass = this; 1675 while (klass != NULL) { 1676 if (InstanceKlass::cast(klass)->has_been_redefined()) { 1677 return true; 1678 } 1679 klass = klass->super(); 1680 } 1681 return false; 1682 } 1683 #endif 1684 1685 // lookup a method in the default methods list then in all transitive interfaces 1686 // Do NOT return private or static methods 1687 Method* InstanceKlass::lookup_method_in_ordered_interfaces(Symbol* name, 1688 Symbol* signature) const { 1689 Method* m = NULL; 1690 if (default_methods() != NULL) { 1691 m = find_method(default_methods(), name, signature); 1692 } 1693 // Look up interfaces 1694 if (m == NULL) { 1695 m = lookup_method_in_all_interfaces(name, signature, find_defaults); 1696 } 1697 return m; 1698 } 1699 1700 // lookup a method in all the interfaces that this class implements 1701 // Do NOT return private or static methods, new in JDK8 which are not externally visible 1702 // They should only be found in the initial InterfaceMethodRef 1703 Method* InstanceKlass::lookup_method_in_all_interfaces(Symbol* name, 1704 Symbol* signature, 1705 DefaultsLookupMode defaults_mode) const { 1706 Array<Klass*>* all_ifs = transitive_interfaces(); 1707 int num_ifs = all_ifs->length(); 1708 InstanceKlass *ik = NULL; 1709 for (int i = 0; i < num_ifs; i++) { 1710 ik = InstanceKlass::cast(all_ifs->at(i)); 1711 Method* m = ik->lookup_method(name, signature); 1712 if (m != NULL && m->is_public() && !m->is_static() && 1713 ((defaults_mode != skip_defaults) || !m->is_default_method())) { 1714 return m; 1715 } 1716 } 1717 return NULL; 1718 } 1719 1720 /* jni_id_for_impl for jfieldIds only */ 1721 JNIid* InstanceKlass::jni_id_for_impl(int offset) { 1722 MutexLocker ml(JfieldIdCreation_lock); 1723 // Retry lookup after we got the lock 1724 JNIid* probe = jni_ids() == NULL ? NULL : jni_ids()->find(offset); 1725 if (probe == NULL) { 1726 // Slow case, allocate new static field identifier 1727 probe = new JNIid(this, offset, jni_ids()); 1728 set_jni_ids(probe); 1729 } 1730 return probe; 1731 } 1732 1733 1734 /* jni_id_for for jfieldIds only */ 1735 JNIid* InstanceKlass::jni_id_for(int offset) { 1736 JNIid* probe = jni_ids() == NULL ? 
NULL : jni_ids()->find(offset); 1737 if (probe == NULL) { 1738 probe = jni_id_for_impl(offset); 1739 } 1740 return probe; 1741 } 1742 1743 u2 InstanceKlass::enclosing_method_data(int offset) const { 1744 const Array<jushort>* const inner_class_list = inner_classes(); 1745 if (inner_class_list == NULL) { 1746 return 0; 1747 } 1748 const int length = inner_class_list->length(); 1749 if (length % inner_class_next_offset == 0) { 1750 return 0; 1751 } 1752 const int index = length - enclosing_method_attribute_size; 1753 assert(offset < enclosing_method_attribute_size, "invalid offset"); 1754 return inner_class_list->at(index + offset); 1755 } 1756 1757 void InstanceKlass::set_enclosing_method_indices(u2 class_index, 1758 u2 method_index) { 1759 Array<jushort>* inner_class_list = inner_classes(); 1760 assert (inner_class_list != NULL, "_inner_classes list is not set up"); 1761 int length = inner_class_list->length(); 1762 if (length % inner_class_next_offset == enclosing_method_attribute_size) { 1763 int index = length - enclosing_method_attribute_size; 1764 inner_class_list->at_put( 1765 index + enclosing_method_class_index_offset, class_index); 1766 inner_class_list->at_put( 1767 index + enclosing_method_method_index_offset, method_index); 1768 } 1769 } 1770 1771 // Lookup or create a jmethodID. 1772 // This code is called by the VMThread and JavaThreads so the 1773 // locking has to be done very carefully to avoid deadlocks 1774 // and/or other cache consistency problems. 1775 // 1776 jmethodID InstanceKlass::get_jmethod_id(const methodHandle& method_h) { 1777 size_t idnum = (size_t)method_h->method_idnum(); 1778 jmethodID* jmeths = methods_jmethod_ids_acquire(); 1779 size_t length = 0; 1780 jmethodID id = NULL; 1781 1782 // We use a double-check locking idiom here because this cache is 1783 // performance sensitive. In the normal system, this cache only 1784 // transitions from NULL to non-NULL which is safe because we use 1785 // release_set_methods_jmethod_ids() to advertise the new cache. 1786 // A partially constructed cache should never be seen by a racing 1787 // thread. We also use release_store() to save a new jmethodID 1788 // in the cache so a partially constructed jmethodID should never be 1789 // seen either. Cache reads of existing jmethodIDs proceed without a 1790 // lock, but cache writes of a new jmethodID requires uniqueness and 1791 // creation of the cache itself requires no leaks so a lock is 1792 // generally acquired in those two cases. 1793 // 1794 // If the RedefineClasses() API has been used, then this cache can 1795 // grow and we'll have transitions from non-NULL to bigger non-NULL. 1796 // Cache creation requires no leaks and we require safety between all 1797 // cache accesses and freeing of the old cache so a lock is generally 1798 // acquired when the RedefineClasses() API has been used. 
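// Cache layout (see get_jmethod_id_length_value() below):
//   jmeths[0]         : capacity of the cache, stored as a jmethodID-sized value
//   jmeths[idnum + 1] : jmethodID for the method with that idnum, or NULL if it has not been created yet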
1799 1800 if (jmeths != NULL) { 1801 // the cache already exists 1802 if (!idnum_can_increment()) { 1803 // the cache can't grow so we can just get the current values 1804 get_jmethod_id_length_value(jmeths, idnum, &length, &id); 1805 } else { 1806 // cache can grow so we have to be more careful 1807 if (Threads::number_of_threads() == 0 || 1808 SafepointSynchronize::is_at_safepoint()) { 1809 // we're single threaded or at a safepoint - no locking needed 1810 get_jmethod_id_length_value(jmeths, idnum, &length, &id); 1811 } else { 1812 MutexLocker ml(JmethodIdCreation_lock); 1813 get_jmethod_id_length_value(jmeths, idnum, &length, &id); 1814 } 1815 } 1816 } 1817 // implied else: 1818 // we need to allocate a cache so default length and id values are good 1819 1820 if (jmeths == NULL || // no cache yet 1821 length <= idnum || // cache is too short 1822 id == NULL) { // cache doesn't contain entry 1823 1824 // This function can be called by the VMThread so we have to do all 1825 // things that might block on a safepoint before grabbing the lock. 1826 // Otherwise, we can deadlock with the VMThread or have a cache 1827 // consistency issue. These vars keep track of what we might have 1828 // to free after the lock is dropped. 1829 jmethodID to_dealloc_id = NULL; 1830 jmethodID* to_dealloc_jmeths = NULL; 1831 1832 // may not allocate new_jmeths or use it if we allocate it 1833 jmethodID* new_jmeths = NULL; 1834 if (length <= idnum) { 1835 // allocate a new cache that might be used 1836 size_t size = MAX2(idnum+1, (size_t)idnum_allocated_count()); 1837 new_jmeths = NEW_C_HEAP_ARRAY(jmethodID, size+1, mtClass); 1838 memset(new_jmeths, 0, (size+1)*sizeof(jmethodID)); 1839 // cache size is stored in element[0], other elements offset by one 1840 new_jmeths[0] = (jmethodID)size; 1841 } 1842 1843 // allocate a new jmethodID that might be used 1844 jmethodID new_id = NULL; 1845 if (method_h->is_old() && !method_h->is_obsolete()) { 1846 // The method passed in is old (but not obsolete), so we need to use the current version 1847 Method* current_method = method_with_idnum((int)idnum); 1848 assert(current_method != NULL, "old but not obsolete, so should exist"); 1849 new_id = Method::make_jmethod_id(class_loader_data(), current_method); 1850 } else { 1851 // It is the current version of the method or an obsolete method, 1852 // use the version passed in 1853 new_id = Method::make_jmethod_id(class_loader_data(), method_h()); 1854 } 1855 1856 if (Threads::number_of_threads() == 0 || 1857 SafepointSynchronize::is_at_safepoint()) { 1858 // we're single threaded or at a safepoint - no locking needed 1859 id = get_jmethod_id_fetch_or_update(idnum, new_id, new_jmeths, 1860 &to_dealloc_id, &to_dealloc_jmeths); 1861 } else { 1862 MutexLocker ml(JmethodIdCreation_lock); 1863 id = get_jmethod_id_fetch_or_update(idnum, new_id, new_jmeths, 1864 &to_dealloc_id, &to_dealloc_jmeths); 1865 } 1866 1867 // The lock has been dropped so we can free resources. 1868 // Free up either the old cache or the new cache if we allocated one. 1869 if (to_dealloc_jmeths != NULL) { 1870 FreeHeap(to_dealloc_jmeths); 1871 } 1872 // free up the new ID since it wasn't needed 1873 if (to_dealloc_id != NULL) { 1874 Method::destroy_jmethod_id(class_loader_data(), to_dealloc_id); 1875 } 1876 } 1877 return id; 1878 } 1879 1880 // Figure out how many jmethodIDs haven't been allocated, and make 1881 // sure space for them is pre-allocated.
This makes getting all 1882 // method ids much, much faster with classes with more than 8 1883 // methods, and has a *substantial* effect on performance with jvmti 1884 // code that loads all jmethodIDs for all classes. 1885 void InstanceKlass::ensure_space_for_methodids(int start_offset) { 1886 int new_jmeths = 0; 1887 int length = methods()->length(); 1888 for (int index = start_offset; index < length; index++) { 1889 Method* m = methods()->at(index); 1890 jmethodID id = m->find_jmethod_id_or_null(); 1891 if (id == NULL) { 1892 new_jmeths++; 1893 } 1894 } 1895 if (new_jmeths != 0) { 1896 Method::ensure_jmethod_ids(class_loader_data(), new_jmeths); 1897 } 1898 } 1899 1900 // Common code to fetch the jmethodID from the cache or update the 1901 // cache with the new jmethodID. This function should never do anything 1902 // that causes the caller to go to a safepoint or we can deadlock with 1903 // the VMThread or have cache consistency issues. 1904 // 1905 jmethodID InstanceKlass::get_jmethod_id_fetch_or_update( 1906 size_t idnum, jmethodID new_id, 1907 jmethodID* new_jmeths, jmethodID* to_dealloc_id_p, 1908 jmethodID** to_dealloc_jmeths_p) { 1909 assert(new_id != NULL, "sanity check"); 1910 assert(to_dealloc_id_p != NULL, "sanity check"); 1911 assert(to_dealloc_jmeths_p != NULL, "sanity check"); 1912 assert(Threads::number_of_threads() == 0 || 1913 SafepointSynchronize::is_at_safepoint() || 1914 JmethodIdCreation_lock->owned_by_self(), "sanity check"); 1915 1916 // reacquire the cache - we are locked, single threaded or at a safepoint 1917 jmethodID* jmeths = methods_jmethod_ids_acquire(); 1918 jmethodID id = NULL; 1919 size_t length = 0; 1920 1921 if (jmeths == NULL || // no cache yet 1922 (length = (size_t)jmeths[0]) <= idnum) { // cache is too short 1923 if (jmeths != NULL) { 1924 // copy any existing entries from the old cache 1925 for (size_t index = 0; index < length; index++) { 1926 new_jmeths[index+1] = jmeths[index+1]; 1927 } 1928 *to_dealloc_jmeths_p = jmeths; // save old cache for later delete 1929 } 1930 release_set_methods_jmethod_ids(jmeths = new_jmeths); 1931 } else { 1932 // fetch jmethodID (if any) from the existing cache 1933 id = jmeths[idnum+1]; 1934 *to_dealloc_jmeths_p = new_jmeths; // save new cache for later delete 1935 } 1936 if (id == NULL) { 1937 // No matching jmethodID in the existing cache or we have a new 1938 // cache or we just grew the cache. This cache write is done here 1939 // by the first thread to win the foot race because a jmethodID 1940 // needs to be unique once it is generally available. 1941 id = new_id; 1942 1943 // The jmethodID cache can be read while unlocked so we have to 1944 // make sure the new jmethodID is complete before installing it 1945 // in the cache. 1946 OrderAccess::release_store(&jmeths[idnum+1], id); 1947 } else { 1948 *to_dealloc_id_p = new_id; // save new id for later delete 1949 } 1950 return id; 1951 } 1952 1953 1954 // Common code to get the jmethodID cache length and the jmethodID 1955 // value at index idnum if there is one. 
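// Sets *id_p to NULL when the cache is shorter than idnum+1 entries or the slot is still empty.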
1956 // 1957 void InstanceKlass::get_jmethod_id_length_value(jmethodID* cache, 1958 size_t idnum, size_t *length_p, jmethodID* id_p) { 1959 assert(cache != NULL, "sanity check"); 1960 assert(length_p != NULL, "sanity check"); 1961 assert(id_p != NULL, "sanity check"); 1962 1963 // cache size is stored in element[0], other elements offset by one 1964 *length_p = (size_t)cache[0]; 1965 if (*length_p <= idnum) { // cache is too short 1966 *id_p = NULL; 1967 } else { 1968 *id_p = cache[idnum+1]; // fetch jmethodID (if any) 1969 } 1970 } 1971 1972 1973 // Lookup a jmethodID, NULL if not found. Do no blocking, no allocations, no handles 1974 jmethodID InstanceKlass::jmethod_id_or_null(Method* method) { 1975 size_t idnum = (size_t)method->method_idnum(); 1976 jmethodID* jmeths = methods_jmethod_ids_acquire(); 1977 size_t length; // length assigned as debugging crumb 1978 jmethodID id = NULL; 1979 if (jmeths != NULL && // If there is a cache 1980 (length = (size_t)jmeths[0]) > idnum) { // and if it is long enough, 1981 id = jmeths[idnum+1]; // Look up the id (may be NULL) 1982 } 1983 return id; 1984 } 1985 1986 inline DependencyContext InstanceKlass::dependencies() { 1987 DependencyContext dep_context(&_dep_context); 1988 return dep_context; 1989 } 1990 1991 int InstanceKlass::mark_dependent_nmethods(KlassDepChange& changes) { 1992 return dependencies().mark_dependent_nmethods(changes); 1993 } 1994 1995 void InstanceKlass::add_dependent_nmethod(nmethod* nm) { 1996 dependencies().add_dependent_nmethod(nm); 1997 } 1998 1999 void InstanceKlass::remove_dependent_nmethod(nmethod* nm, bool delete_immediately) { 2000 dependencies().remove_dependent_nmethod(nm, delete_immediately); 2001 } 2002 2003 #ifndef PRODUCT 2004 void InstanceKlass::print_dependent_nmethods(bool verbose) { 2005 dependencies().print_dependent_nmethods(verbose); 2006 } 2007 2008 bool InstanceKlass::is_dependent_nmethod(nmethod* nm) { 2009 return dependencies().is_dependent_nmethod(nm); 2010 } 2011 #endif //PRODUCT 2012 2013 void InstanceKlass::clean_weak_instanceklass_links(BoolObjectClosure* is_alive) { 2014 clean_implementors_list(is_alive); 2015 clean_method_data(is_alive); 2016 2017 // Since GC iterates InstanceKlasses sequentially, it is safe to remove stale entries here. 
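// (Stale entries correspond to nmethods that have already been unloaded.)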
2018 DependencyContext dep_context(&_dep_context); 2019 dep_context.expunge_stale_entries(); 2020 } 2021 2022 void InstanceKlass::clean_implementors_list(BoolObjectClosure* is_alive) { 2023 assert(class_loader_data()->is_alive(is_alive), "this klass should be live"); 2024 if (is_interface()) { 2025 if (ClassUnloading) { 2026 Klass* impl = implementor(); 2027 if (impl != NULL) { 2028 if (!impl->is_loader_alive(is_alive)) { 2029 // remove this guy 2030 Klass** klass = adr_implementor(); 2031 assert(klass != NULL, "null klass"); 2032 if (klass != NULL) { 2033 *klass = NULL; 2034 } 2035 } 2036 } 2037 } 2038 } 2039 } 2040 2041 void InstanceKlass::clean_method_data(BoolObjectClosure* is_alive) { 2042 for (int m = 0; m < methods()->length(); m++) { 2043 MethodData* mdo = methods()->at(m)->method_data(); 2044 if (mdo != NULL) { 2045 mdo->clean_method_data(is_alive); 2046 } 2047 } 2048 } 2049 2050 bool InstanceKlass::supers_have_passed_fingerprint_checks() { 2051 if (java_super() != NULL && !java_super()->has_passed_fingerprint_check()) { 2052 ResourceMark rm; 2053 log_trace(class, fingerprint)("%s : super %s not fingerprinted", external_name(), java_super()->external_name()); 2054 return false; 2055 } 2056 2057 Array<Klass*>* local_interfaces = this->local_interfaces(); 2058 if (local_interfaces != NULL) { 2059 int length = local_interfaces->length(); 2060 for (int i = 0; i < length; i++) { 2061 InstanceKlass* intf = InstanceKlass::cast(local_interfaces->at(i)); 2062 if (!intf->has_passed_fingerprint_check()) { 2063 ResourceMark rm; 2064 log_trace(class, fingerprint)("%s : interface %s not fingerprinted", external_name(), intf->external_name()); 2065 return false; 2066 } 2067 } 2068 } 2069 2070 return true; 2071 } 2072 2073 bool InstanceKlass::should_store_fingerprint(bool is_anonymous) { 2074 #if INCLUDE_AOT 2075 // We store the fingerprint into the InstanceKlass only in the following 2 cases: 2076 if (CalculateClassFingerprint) { 2077 // (1) We are running AOT to generate a shared library. 2078 return true; 2079 } 2080 if (DumpSharedSpaces) { 2081 // (2) We are running -Xshare:dump to create a shared archive 2082 return true; 2083 } 2084 if (UseAOT && is_anonymous) { 2085 // (3) We are using AOT code from a shared library and see an anonymous class 2086 return true; 2087 } 2088 #endif 2089 2090 // In all other cases we might set the _misc_has_passed_fingerprint_check bit, 2091 // but do not store the 64-bit fingerprint to save space. 
2092 return false; 2093 } 2094 2095 bool InstanceKlass::has_stored_fingerprint() const { 2096 #if INCLUDE_AOT 2097 return should_store_fingerprint() || is_shared(); 2098 #else 2099 return false; 2100 #endif 2101 } 2102 2103 uint64_t InstanceKlass::get_stored_fingerprint() const { 2104 address adr = adr_fingerprint(); 2105 if (adr != NULL) { 2106 return (uint64_t)Bytes::get_native_u8(adr); // adr may not be 64-bit aligned 2107 } 2108 return 0; 2109 } 2110 2111 void InstanceKlass::store_fingerprint(uint64_t fingerprint) { 2112 address adr = adr_fingerprint(); 2113 if (adr != NULL) { 2114 Bytes::put_native_u8(adr, (u8)fingerprint); // adr may not be 64-bit aligned 2115 2116 ResourceMark rm; 2117 log_trace(class, fingerprint)("stored as " PTR64_FORMAT " for class %s", fingerprint, external_name()); 2118 } 2119 } 2120 2121 void InstanceKlass::metaspace_pointers_do(MetaspaceClosure* it) { 2122 Klass::metaspace_pointers_do(it); 2123 2124 if (log_is_enabled(Trace, cds)) { 2125 ResourceMark rm; 2126 log_trace(cds)("Iter(InstanceKlass): %p (%s)", this, external_name()); 2127 } 2128 2129 it->push(&_annotations); 2130 it->push((Klass**)&_array_klasses); 2131 it->push(&_constants); 2132 it->push(&_inner_classes); 2133 it->push(&_array_name); 2134 #if INCLUDE_JVMTI 2135 it->push(&_previous_versions); 2136 #endif 2137 it->push(&_methods); 2138 it->push(&_default_methods); 2139 it->push(&_local_interfaces); 2140 it->push(&_transitive_interfaces); 2141 it->push(&_method_ordering); 2142 it->push(&_default_vtable_indices); 2143 it->push(&_fields); 2144 2145 if (itable_length() > 0) { 2146 itableOffsetEntry* ioe = (itableOffsetEntry*)start_of_itable(); 2147 int method_table_offset_in_words = ioe->offset()/wordSize; 2148 int nof_interfaces = (method_table_offset_in_words - itable_offset_in_words()) 2149 / itableOffsetEntry::size(); 2150 2151 for (int i = 0; i < nof_interfaces; i ++, ioe ++) { 2152 if (ioe->interface_klass() != NULL) { 2153 it->push(ioe->interface_klass_addr()); 2154 itableMethodEntry* ime = ioe->first_method_entry(this); 2155 int n = klassItable::method_count_for_interface(ioe->interface_klass()); 2156 for (int index = 0; index < n; index ++) { 2157 it->push(ime[index].method_addr()); 2158 } 2159 } 2160 } 2161 } 2162 } 2163 2164 void InstanceKlass::remove_unshareable_info() { 2165 Klass::remove_unshareable_info(); 2166 2167 if (is_in_error_state()) { 2168 // Classes are attempted to be linked during dumping and may fail, 2169 // but these classes are still in the dictionary and class list in CLD. 2170 // Check in_error state first because in_error is > linked state, so 2171 // is_linked() is true. 2172 // If there's a linking error, there is nothing else to remove. 2173 return; 2174 } 2175 2176 // Unlink the class 2177 if (is_linked()) { 2178 unlink_class(); 2179 } 2180 init_implementor(); 2181 2182 constants()->remove_unshareable_info(); 2183 2184 for (int i = 0; i < methods()->length(); i++) { 2185 Method* m = methods()->at(i); 2186 m->remove_unshareable_info(); 2187 } 2188 2189 // do array classes also. 2190 if (array_klasses() != NULL) { 2191 array_klasses()->remove_unshareable_info(); 2192 } 2193 2194 // These are not allocated from metaspace, but they should all be empty 2195 // during dump time, so we don't need to worry about them in InstanceKlass::iterate().
2196 guarantee(_source_debug_extension == NULL, "must be"); 2197 guarantee(_dep_context == DependencyContext::EMPTY, "must be"); 2198 guarantee(_osr_nmethods_head == NULL, "must be"); 2199 2200 #if INCLUDE_JVMTI 2201 guarantee(_breakpoints == NULL, "must be"); 2202 guarantee(_previous_versions == NULL, "must be"); 2203 #endif 2204 2205 _init_thread = NULL; 2206 _methods_jmethod_ids = NULL; 2207 _jni_ids = NULL; 2208 _oop_map_cache = NULL; 2209 } 2210 2211 void InstanceKlass::remove_java_mirror() { 2212 Klass::remove_java_mirror(); 2213 2214 // do array classes also. 2215 if (array_klasses() != NULL) { 2216 array_klasses()->remove_java_mirror(); 2217 } 2218 } 2219 2220 void InstanceKlass::restore_unshareable_info(ClassLoaderData* loader_data, Handle protection_domain, TRAPS) { 2221 set_package(loader_data, CHECK); 2222 Klass::restore_unshareable_info(loader_data, protection_domain, CHECK); 2223 2224 Array<Method*>* methods = this->methods(); 2225 int num_methods = methods->length(); 2226 for (int index2 = 0; index2 < num_methods; ++index2) { 2227 methodHandle m(THREAD, methods->at(index2)); 2228 m->restore_unshareable_info(CHECK); 2229 } 2230 if (JvmtiExport::has_redefined_a_class()) { 2231 // Reinitialize vtable because RedefineClasses may have changed some 2232 // entries in this vtable for super classes so the CDS vtable might 2233 // point to old or obsolete entries. RedefineClasses doesn't fix up 2234 // vtables in the shared system dictionary, only the main one. 2235 // It also redefines the itable too so fix that too. 2236 ResourceMark rm(THREAD); 2237 vtable().initialize_vtable(false, CHECK); 2238 itable().initialize_itable(false, CHECK); 2239 } 2240 2241 // restore constant pool resolved references 2242 constants()->restore_unshareable_info(CHECK); 2243 2244 if (array_klasses() != NULL) { 2245 // Array classes have null protection domain. 2246 // --> see ArrayKlass::complete_create_array_klass() 2247 array_klasses()->restore_unshareable_info(ClassLoaderData::the_null_class_loader_data(), Handle(), CHECK); 2248 } 2249 } 2250 2251 // returns true IFF is_in_error_state() has been changed as a result of this call. 
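// Called during CDS dumping only: if the super class or any transitive interface is in the
// error state, that state is propagated to this class.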
2252 bool InstanceKlass::check_sharing_error_state() { 2253 assert(DumpSharedSpaces, "should only be called during dumping"); 2254 bool old_state = is_in_error_state(); 2255 2256 if (!is_in_error_state()) { 2257 bool bad = false; 2258 for (InstanceKlass* sup = java_super(); sup; sup = sup->java_super()) { 2259 if (sup->is_in_error_state()) { 2260 bad = true; 2261 break; 2262 } 2263 } 2264 if (!bad) { 2265 Array<Klass*>* interfaces = transitive_interfaces(); 2266 for (int i = 0; i < interfaces->length(); i++) { 2267 Klass* iface = interfaces->at(i); 2268 if (InstanceKlass::cast(iface)->is_in_error_state()) { 2269 bad = true; 2270 break; 2271 } 2272 } 2273 } 2274 2275 if (bad) { 2276 set_in_error_state(); 2277 } 2278 } 2279 2280 return (old_state != is_in_error_state()); 2281 } 2282 2283 #if INCLUDE_JVMTI 2284 static void clear_all_breakpoints(Method* m) { 2285 m->clear_all_breakpoints(); 2286 } 2287 #endif 2288 2289 void InstanceKlass::notify_unload_class(InstanceKlass* ik) { 2290 // notify the debugger 2291 if (JvmtiExport::should_post_class_unload()) { 2292 JvmtiExport::post_class_unload(ik); 2293 } 2294 2295 // notify ClassLoadingService of class unload 2296 ClassLoadingService::notify_class_unloaded(ik); 2297 } 2298 2299 void InstanceKlass::release_C_heap_structures(InstanceKlass* ik) { 2300 // Clean up C heap 2301 ik->release_C_heap_structures(); 2302 ik->constants()->release_C_heap_structures(); 2303 } 2304 2305 void InstanceKlass::release_C_heap_structures() { 2306 // Can't release the constant pool here because the constant pool can be 2307 // deallocated separately from the InstanceKlass for default methods and 2308 // redefine classes. 2309 2310 // Deallocate oop map cache 2311 if (_oop_map_cache != NULL) { 2312 delete _oop_map_cache; 2313 _oop_map_cache = NULL; 2314 } 2315 2316 // Deallocate JNI identifiers for jfieldIDs 2317 JNIid::deallocate(jni_ids()); 2318 set_jni_ids(NULL); 2319 2320 jmethodID* jmeths = methods_jmethod_ids_acquire(); 2321 if (jmeths != (jmethodID*)NULL) { 2322 release_set_methods_jmethod_ids(NULL); 2323 FreeHeap(jmeths); 2324 } 2325 2326 // Release dependencies. 2327 // It is desirable to use DC::remove_all_dependents() here, but, unfortunately, 2328 // it is not safe (see JDK-8143408). The problem is that the klass dependency 2329 // context can contain live dependencies, since there's a race between nmethod & 2330 // klass unloading. If the klass is dead when nmethod unloading happens, relevant 2331 // dependencies aren't removed from the context associated with the class (see 2332 // nmethod::flush_dependencies). It ends up during klass unloading as seemingly 2333 // live dependencies pointing to unloaded nmethods and causes a crash in 2334 // DC::remove_all_dependents() when it touches unloaded nmethod. 2335 dependencies().wipe(); 2336 2337 #if INCLUDE_JVMTI 2338 // Deallocate breakpoint records 2339 if (breakpoints() != 0x0) { 2340 methods_do(clear_all_breakpoints); 2341 assert(breakpoints() == 0x0, "should have cleared breakpoints"); 2342 } 2343 2344 // deallocate the cached class file 2345 if (_cached_class_file != NULL && !MetaspaceShared::is_in_shared_metaspace(_cached_class_file)) { 2346 os::free(_cached_class_file); 2347 _cached_class_file = NULL; 2348 } 2349 #endif 2350 2351 // Decrement symbol reference counts associated with the unloaded class. 2352 if (_name != NULL) _name->decrement_refcount(); 2353 // unreference array name derived from this class name (arrays of an unloaded 2354 // class can't be referenced anymore). 
2355 if (_array_name != NULL) _array_name->decrement_refcount(); 2356 if (_value_types != NULL) { 2357 for (int i = 0; i < _value_types->length(); i++) { 2358 Symbol* s = _value_types->at(i)._class_name; 2359 if (s != NULL) { 2360 s->decrement_refcount(); 2361 } 2362 } 2363 } 2364 if (_source_debug_extension != NULL) FREE_C_HEAP_ARRAY(char, _source_debug_extension); 2365 } 2366 2367 void InstanceKlass::set_source_debug_extension(const char* array, int length) { 2368 if (array == NULL) { 2369 _source_debug_extension = NULL; 2370 } else { 2371 // Adding one to the attribute length in order to store a null terminator 2372 // character could cause an overflow because the attribute length is 2373 // already coded with an u4 in the classfile, but in practice, it's 2374 // unlikely to happen. 2375 assert((length+1) > length, "Overflow checking"); 2376 char* sde = NEW_C_HEAP_ARRAY(char, (length + 1), mtClass); 2377 for (int i = 0; i < length; i++) { 2378 sde[i] = array[i]; 2379 } 2380 sde[length] = '\0'; 2381 _source_debug_extension = sde; 2382 } 2383 } 2384 2385 address InstanceKlass::static_field_addr(int offset) { 2386 assert(offset >= InstanceMirrorKlass::offset_of_static_fields(), "has already been adjusted"); 2387 return (address)(offset + cast_from_oop<intptr_t>(java_mirror())); 2388 } 2389 2390 2391 const char* InstanceKlass::signature_name() const { 2392 int hash_len = 0; 2393 char hash_buf[40]; 2394 2395 // If this is an anonymous class, append a hash to make the name unique 2396 if (is_anonymous()) { 2397 intptr_t hash = (java_mirror() != NULL) ? java_mirror()->identity_hash() : 0; 2398 jio_snprintf(hash_buf, sizeof(hash_buf), "/" UINTX_FORMAT, (uintx)hash); 2399 hash_len = (int)strlen(hash_buf); 2400 } 2401 2402 // Get the internal name as a c string 2403 const char* src = (const char*) (name()->as_C_string()); 2404 const int src_length = (int)strlen(src); 2405 2406 char* dest = NEW_RESOURCE_ARRAY(char, src_length + hash_len + 3); 2407 2408 // Add L as type indicator 2409 int dest_index = 0; 2410 dest[dest_index++] = 'L'; 2411 2412 // Add the actual class name 2413 for (int src_index = 0; src_index < src_length; ) { 2414 dest[dest_index++] = src[src_index++]; 2415 } 2416 2417 // If we have a hash, append it 2418 for (int hash_index = 0; hash_index < hash_len; ) { 2419 dest[dest_index++] = hash_buf[hash_index++]; 2420 } 2421 2422 // Add the semicolon and the NULL 2423 dest[dest_index++] = ';'; 2424 dest[dest_index] = '\0'; 2425 return dest; 2426 } 2427 2428 // Used to obtain the package name from a fully qualified class name. 
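// Returns NULL if the name is NULL, empty, or has no package component.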
2429 Symbol* InstanceKlass::package_from_name(const Symbol* name, TRAPS) { 2430 if (name == NULL) { 2431 return NULL; 2432 } else { 2433 if (name->utf8_length() <= 0) { 2434 return NULL; 2435 } 2436 ResourceMark rm; 2437 const char* package_name = ClassLoader::package_from_name((const char*) name->as_C_string()); 2438 if (package_name == NULL) { 2439 return NULL; 2440 } 2441 Symbol* pkg_name = SymbolTable::new_symbol(package_name, THREAD); 2442 return pkg_name; 2443 } 2444 } 2445 2446 ModuleEntry* InstanceKlass::module() const { 2447 if (!in_unnamed_package()) { 2448 return _package_entry->module(); 2449 } 2450 const Klass* host = host_klass(); 2451 if (host == NULL) { 2452 return class_loader_data()->unnamed_module(); 2453 } 2454 return host->class_loader_data()->unnamed_module(); 2455 } 2456 2457 void InstanceKlass::set_package(ClassLoaderData* loader_data, TRAPS) { 2458 2459 // ensure java/ packages only loaded by boot or platform builtin loaders 2460 Handle class_loader(THREAD, loader_data->class_loader()); 2461 check_prohibited_package(name(), class_loader, CHECK); 2462 2463 TempNewSymbol pkg_name = package_from_name(name(), CHECK); 2464 2465 if (pkg_name != NULL && loader_data != NULL) { 2466 2467 // Find in class loader's package entry table. 2468 _package_entry = loader_data->packages()->lookup_only(pkg_name); 2469 2470 // If the package name is not found in the loader's package 2471 // entry table, it is an indication that the package has not 2472 // been defined. Consider it defined within the unnamed module. 2473 if (_package_entry == NULL) { 2474 ResourceMark rm; 2475 2476 if (!ModuleEntryTable::javabase_defined()) { 2477 // Before java.base is defined during bootstrapping, define all packages in 2478 // the java.base module. If a non-java.base package is erroneously placed 2479 // in the java.base module it will be caught later when java.base 2480 // is defined by ModuleEntryTable::verify_javabase_packages check. 2481 assert(ModuleEntryTable::javabase_moduleEntry() != NULL, JAVA_BASE_NAME " module is NULL"); 2482 _package_entry = loader_data->packages()->lookup(pkg_name, ModuleEntryTable::javabase_moduleEntry()); 2483 } else { 2484 assert(loader_data->unnamed_module() != NULL, "unnamed module is NULL"); 2485 _package_entry = loader_data->packages()->lookup(pkg_name, 2486 loader_data->unnamed_module()); 2487 } 2488 2489 // A package should have been successfully created 2490 assert(_package_entry != NULL, "Package entry for class %s not found, loader %s", 2491 name()->as_C_string(), loader_data->loader_name()); 2492 } 2493 2494 if (log_is_enabled(Debug, module)) { 2495 ResourceMark rm; 2496 ModuleEntry* m = _package_entry->module(); 2497 log_trace(module)("Setting package: class: %s, package: %s, loader: %s, module: %s", 2498 external_name(), 2499 pkg_name->as_C_string(), 2500 loader_data->loader_name(), 2501 (m->is_named() ? m->name()->as_C_string() : UNNAMED_MODULE)); 2502 } 2503 } else { 2504 ResourceMark rm; 2505 log_trace(module)("Setting package: class: %s, package: unnamed, loader: %s, module: %s", 2506 external_name(), 2507 (loader_data != NULL) ? 
loader_data->loader_name() : "NULL", 2508 UNNAMED_MODULE); 2509 } 2510 } 2511 2512 2513 // different versions of is_same_class_package 2514 2515 bool InstanceKlass::is_same_class_package(const Klass* class2) const { 2516 oop classloader1 = this->class_loader(); 2517 PackageEntry* classpkg1 = this->package(); 2518 if (class2->is_objArray_klass()) { 2519 class2 = ObjArrayKlass::cast(class2)->bottom_klass(); 2520 } 2521 2522 oop classloader2; 2523 PackageEntry* classpkg2; 2524 if (class2->is_instance_klass()) { 2525 classloader2 = class2->class_loader(); 2526 classpkg2 = class2->package(); 2527 } else { 2528 assert(class2->is_typeArray_klass(), "should be type array"); 2529 classloader2 = NULL; 2530 classpkg2 = NULL; 2531 } 2532 2533 // Same package is determined by comparing class loader 2534 // and package entries. Both must be the same. This rule 2535 // applies even to classes that are defined in the unnamed 2536 // package, they still must have the same class loader. 2537 if ((classloader1 == classloader2) && (classpkg1 == classpkg2)) { 2538 return true; 2539 } 2540 2541 return false; 2542 } 2543 2544 // return true if this class and other_class are in the same package. Classloader 2545 // and classname information is enough to determine a class's package 2546 bool InstanceKlass::is_same_class_package(oop other_class_loader, 2547 const Symbol* other_class_name) const { 2548 if (class_loader() != other_class_loader) { 2549 return false; 2550 } 2551 if (name()->fast_compare(other_class_name) == 0) { 2552 return true; 2553 } 2554 2555 { 2556 ResourceMark rm; 2557 2558 bool bad_class_name = false; 2559 const char* other_pkg = 2560 ClassLoader::package_from_name((const char*) other_class_name->as_C_string(), &bad_class_name); 2561 if (bad_class_name) { 2562 return false; 2563 } 2564 // Check that package_from_name() returns NULL, not "", if there is no package. 2565 assert(other_pkg == NULL || strlen(other_pkg) > 0, "package name is empty string"); 2566 2567 const Symbol* const this_package_name = 2568 this->package() != NULL ? this->package()->name() : NULL; 2569 2570 if (this_package_name == NULL || other_pkg == NULL) { 2571 // One of the two doesn't have a package. Only return true if the other 2572 // one also doesn't have a package. 2573 return (const char*)this_package_name == other_pkg; 2574 } 2575 2576 // Check if package is identical 2577 return this_package_name->equals(other_pkg); 2578 } 2579 } 2580 2581 // Returns true iff super_method can be overridden by a method in targetclassname 2582 // See JLS 3rd edition 8.4.6.1 2583 // Assumes name-signature match 2584 // "this" is InstanceKlass of super_method which must exist 2585 // note that the InstanceKlass of the method in the targetclassname has not always been created yet 2586 bool InstanceKlass::is_override(const methodHandle& super_method, Handle targetclassloader, Symbol* targetclassname, TRAPS) { 2587 // Private methods can not be overridden 2588 if (super_method->is_private()) { 2589 return false; 2590 } 2591 // If super method is accessible, then override 2592 if ((super_method->is_protected()) || 2593 (super_method->is_public())) { 2594 return true; 2595 } 2596 // Package-private methods are not inherited outside of package 2597 assert(super_method->is_package_private(), "must be package private"); 2598 return(is_same_class_package(targetclassloader(), targetclassname)); 2599 } 2600 2601 // Only boot and platform class loaders can define classes in "java/" packages. 
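// Attempts by any other loader result in a SecurityException.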
2602 void InstanceKlass::check_prohibited_package(Symbol* class_name, 2603 Handle class_loader, 2604 TRAPS) { 2605 if (!class_loader.is_null() && 2606 !SystemDictionary::is_platform_class_loader(class_loader()) && 2607 class_name != NULL) { 2608 ResourceMark rm(THREAD); 2609 char* name = class_name->as_C_string(); 2610 if (strncmp(name, JAVAPKG, JAVAPKG_LEN) == 0 && name[JAVAPKG_LEN] == '/') { 2611 TempNewSymbol pkg_name = InstanceKlass::package_from_name(class_name, CHECK); 2612 assert(pkg_name != NULL, "Error in parsing package name starting with 'java/'"); 2613 name = pkg_name->as_C_string(); 2614 const char* class_loader_name = SystemDictionary::loader_name(class_loader()); 2615 StringUtils::replace_no_expand(name, "/", "."); 2616 const char* msg_text1 = "Class loader (instance of): "; 2617 const char* msg_text2 = " tried to load prohibited package name: "; 2618 size_t len = strlen(msg_text1) + strlen(class_loader_name) + strlen(msg_text2) + strlen(name) + 1; 2619 char* message = NEW_RESOURCE_ARRAY_IN_THREAD(THREAD, char, len); 2620 jio_snprintf(message, len, "%s%s%s%s", msg_text1, class_loader_name, msg_text2, name); 2621 THROW_MSG(vmSymbols::java_lang_SecurityException(), message); 2622 } 2623 } 2624 return; 2625 } 2626 2627 // tell if two classes have the same enclosing class (at package level) 2628 bool InstanceKlass::is_same_package_member(const Klass* class2, TRAPS) const { 2629 if (class2 == this) return true; 2630 if (!class2->is_instance_klass()) return false; 2631 2632 // must be in same package before we try anything else 2633 if (!is_same_class_package(class2)) 2634 return false; 2635 2636 // As long as there is an outer_this.getEnclosingClass, 2637 // shift the search outward. 2638 const InstanceKlass* outer_this = this; 2639 for (;;) { 2640 // As we walk along, look for equalities between outer_this and class2. 2641 // Eventually, the walks will terminate as outer_this stops 2642 // at the top-level class around the original class. 2643 bool ignore_inner_is_member; 2644 const Klass* next = outer_this->compute_enclosing_class(&ignore_inner_is_member, 2645 CHECK_false); 2646 if (next == NULL) break; 2647 if (next == class2) return true; 2648 outer_this = InstanceKlass::cast(next); 2649 } 2650 2651 // Now do the same for class2. 2652 const InstanceKlass* outer2 = InstanceKlass::cast(class2); 2653 for (;;) { 2654 bool ignore_inner_is_member; 2655 Klass* next = outer2->compute_enclosing_class(&ignore_inner_is_member, 2656 CHECK_false); 2657 if (next == NULL) break; 2658 // Might as well check the new outer against all available values. 2659 if (next == this) return true; 2660 if (next == outer_this) return true; 2661 outer2 = InstanceKlass::cast(next); 2662 } 2663 2664 // If by this point we have not found an equality between the 2665 // two classes, we know they are in separate package members. 2666 return false; 2667 } 2668 2669 bool InstanceKlass::find_inner_classes_attr(int* ooff, int* noff, TRAPS) const { 2670 constantPoolHandle i_cp(THREAD, constants()); 2671 for (InnerClassesIterator iter(this); !iter.done(); iter.next()) { 2672 int ioff = iter.inner_class_info_index(); 2673 if (ioff != 0) { 2674 // Check to see if the name matches the class we're looking for 2675 // before attempting to find the class. 
2676 if (i_cp->klass_name_at_matches(this, ioff)) { 2677 Klass* inner_klass = i_cp->klass_at(ioff, CHECK_false); 2678 if (this == inner_klass) { 2679 *ooff = iter.outer_class_info_index(); 2680 *noff = iter.inner_name_index(); 2681 return true; 2682 } 2683 } 2684 } 2685 } 2686 return false; 2687 } 2688 2689 InstanceKlass* InstanceKlass::compute_enclosing_class(bool* inner_is_member, TRAPS) const { 2690 InstanceKlass* outer_klass = NULL; 2691 *inner_is_member = false; 2692 int ooff = 0, noff = 0; 2693 bool has_inner_classes_attr = find_inner_classes_attr(&ooff, &noff, THREAD); 2694 if (has_inner_classes_attr) { 2695 constantPoolHandle i_cp(THREAD, constants()); 2696 if (ooff != 0) { 2697 Klass* ok = i_cp->klass_at(ooff, CHECK_NULL); 2698 outer_klass = InstanceKlass::cast(ok); 2699 *inner_is_member = true; 2700 } 2701 if (NULL == outer_klass) { 2702 // It may be anonymous; try for that. 2703 int encl_method_class_idx = enclosing_method_class_index(); 2704 if (encl_method_class_idx != 0) { 2705 Klass* ok = i_cp->klass_at(encl_method_class_idx, CHECK_NULL); 2706 outer_klass = InstanceKlass::cast(ok); 2707 *inner_is_member = false; 2708 } 2709 } 2710 } 2711 2712 // If no inner class attribute found for this class. 2713 if (NULL == outer_klass) return NULL; 2714 2715 // Throws an exception if outer klass has not declared k as an inner klass 2716 // We need evidence that each klass knows about the other, or else 2717 // the system could allow a spoof of an inner class to gain access rights. 2718 Reflection::check_for_inner_class(outer_klass, this, *inner_is_member, CHECK_NULL); 2719 return outer_klass; 2720 } 2721 2722 jint InstanceKlass::compute_modifier_flags(TRAPS) const { 2723 jint access = access_flags().as_int(); 2724 2725 // But check if it happens to be member class. 2726 InnerClassesIterator iter(this); 2727 for (; !iter.done(); iter.next()) { 2728 int ioff = iter.inner_class_info_index(); 2729 // Inner class attribute can be zero, skip it. 2730 // Strange but true: JVM spec. allows null inner class refs. 2731 if (ioff == 0) continue; 2732 2733 // only look at classes that are already loaded 2734 // since we are looking for the flags for our self. 2735 Symbol* inner_name = constants()->klass_name_at(ioff); 2736 if (name() == inner_name) { 2737 // This is really a member class. 2738 access = iter.inner_access_flags(); 2739 break; 2740 } 2741 } 2742 // Remember to strip ACC_SUPER bit 2743 return (access & (~JVM_ACC_SUPER)) & JVM_ACC_WRITTEN_FLAGS; 2744 } 2745 2746 jint InstanceKlass::jvmti_class_status() const { 2747 jint result = 0; 2748 2749 if (is_linked()) { 2750 result |= JVMTI_CLASS_STATUS_VERIFIED | JVMTI_CLASS_STATUS_PREPARED; 2751 } 2752 2753 if (is_initialized()) { 2754 assert(is_linked(), "Class status is not consistent"); 2755 result |= JVMTI_CLASS_STATUS_INITIALIZED; 2756 } 2757 if (is_in_error_state()) { 2758 result |= JVMTI_CLASS_STATUS_ERROR; 2759 } 2760 return result; 2761 } 2762 2763 Method* InstanceKlass::method_at_itable(Klass* holder, int index, TRAPS) { 2764 itableOffsetEntry* ioe = (itableOffsetEntry*)start_of_itable(); 2765 int method_table_offset_in_words = ioe->offset()/wordSize; 2766 int nof_interfaces = (method_table_offset_in_words - itable_offset_in_words()) 2767 / itableOffsetEntry::size(); 2768 2769 for (int cnt = 0 ; ; cnt ++, ioe ++) { 2770 // If the interface isn't implemented by the receiver class, 2771 // the VM should throw IncompatibleClassChangeError. 
2772 if (cnt >= nof_interfaces) { 2773 THROW_NULL(vmSymbols::java_lang_IncompatibleClassChangeError()); 2774 } 2775 2776 Klass* ik = ioe->interface_klass(); 2777 if (ik == holder) break; 2778 } 2779 2780 itableMethodEntry* ime = ioe->first_method_entry(this); 2781 Method* m = ime[index].method(); 2782 if (m == NULL) { 2783 THROW_NULL(vmSymbols::java_lang_AbstractMethodError()); 2784 } 2785 return m; 2786 } 2787 2788 2789 #if INCLUDE_JVMTI 2790 // update default_methods for redefineclasses for methods that are 2791 // not yet in the vtable due to concurrent subclass define and superinterface 2792 // redefinition 2793 // Note: those in the vtable, should have been updated via adjust_method_entries 2794 void InstanceKlass::adjust_default_methods(InstanceKlass* holder, bool* trace_name_printed) { 2795 // search the default_methods for uses of either obsolete or EMCP methods 2796 if (default_methods() != NULL) { 2797 for (int index = 0; index < default_methods()->length(); index ++) { 2798 Method* old_method = default_methods()->at(index); 2799 if (old_method == NULL || old_method->method_holder() != holder || !old_method->is_old()) { 2800 continue; // skip uninteresting entries 2801 } 2802 assert(!old_method->is_deleted(), "default methods may not be deleted"); 2803 2804 Method* new_method = holder->method_with_idnum(old_method->orig_method_idnum()); 2805 2806 assert(new_method != NULL, "method_with_idnum() should not be NULL"); 2807 assert(old_method != new_method, "sanity check"); 2808 2809 default_methods()->at_put(index, new_method); 2810 if (log_is_enabled(Info, redefine, class, update)) { 2811 ResourceMark rm; 2812 if (!(*trace_name_printed)) { 2813 log_info(redefine, class, update) 2814 ("adjust: klassname=%s default methods from name=%s", 2815 external_name(), old_method->method_holder()->external_name()); 2816 *trace_name_printed = true; 2817 } 2818 log_debug(redefine, class, update, vtables) 2819 ("default method update: %s(%s) ", 2820 new_method->name()->as_C_string(), new_method->signature()->as_C_string()); 2821 } 2822 } 2823 } 2824 } 2825 #endif // INCLUDE_JVMTI 2826 2827 // On-stack replacement stuff 2828 void InstanceKlass::add_osr_nmethod(nmethod* n) { 2829 // only one compilation can be active 2830 { 2831 // This is a short non-blocking critical region, so the no safepoint check is ok. 2832 MutexLockerEx ml(OsrList_lock, Mutex::_no_safepoint_check_flag); 2833 assert(n->is_osr_method(), "wrong kind of nmethod"); 2834 n->set_osr_link(osr_nmethods_head()); 2835 set_osr_nmethods_head(n); 2836 // Raise the highest osr level if necessary 2837 if (TieredCompilation) { 2838 Method* m = n->method(); 2839 m->set_highest_osr_comp_level(MAX2(m->highest_osr_comp_level(), n->comp_level())); 2840 } 2841 } 2842 2843 // Get rid of the osr methods for the same bci that have lower levels. 2844 if (TieredCompilation) { 2845 for (int l = CompLevel_limited_profile; l < n->comp_level(); l++) { 2846 nmethod *inv = lookup_osr_nmethod(n->method(), n->osr_entry_bci(), l, true); 2847 if (inv != NULL && inv->is_in_use()) { 2848 inv->make_not_entrant(); 2849 } 2850 } 2851 } 2852 } 2853 2854 // Remove osr nmethod from the list. Return true if found and removed. 2855 bool InstanceKlass::remove_osr_nmethod(nmethod* n) { 2856 // This is a short non-blocking critical region, so the no safepoint check is ok. 
2857 MutexLockerEx ml(OsrList_lock, Mutex::_no_safepoint_check_flag); 2858 assert(n->is_osr_method(), "wrong kind of nmethod"); 2859 nmethod* last = NULL; 2860 nmethod* cur = osr_nmethods_head(); 2861 int max_level = CompLevel_none; // Find the max comp level excluding n 2862 Method* m = n->method(); 2863 // Search for match 2864 bool found = false; 2865 while(cur != NULL && cur != n) { 2866 if (TieredCompilation && m == cur->method()) { 2867 // Find max level before n 2868 max_level = MAX2(max_level, cur->comp_level()); 2869 } 2870 last = cur; 2871 cur = cur->osr_link(); 2872 } 2873 nmethod* next = NULL; 2874 if (cur == n) { 2875 found = true; 2876 next = cur->osr_link(); 2877 if (last == NULL) { 2878 // Remove first element 2879 set_osr_nmethods_head(next); 2880 } else { 2881 last->set_osr_link(next); 2882 } 2883 } 2884 n->set_osr_link(NULL); 2885 if (TieredCompilation) { 2886 cur = next; 2887 while (cur != NULL) { 2888 // Find max level after n 2889 if (m == cur->method()) { 2890 max_level = MAX2(max_level, cur->comp_level()); 2891 } 2892 cur = cur->osr_link(); 2893 } 2894 m->set_highest_osr_comp_level(max_level); 2895 } 2896 return found; 2897 } 2898 2899 int InstanceKlass::mark_osr_nmethods(const Method* m) { 2900 // This is a short non-blocking critical region, so the no safepoint check is ok. 2901 MutexLockerEx ml(OsrList_lock, Mutex::_no_safepoint_check_flag); 2902 nmethod* osr = osr_nmethods_head(); 2903 int found = 0; 2904 while (osr != NULL) { 2905 assert(osr->is_osr_method(), "wrong kind of nmethod found in chain"); 2906 if (osr->method() == m) { 2907 osr->mark_for_deoptimization(); 2908 found++; 2909 } 2910 osr = osr->osr_link(); 2911 } 2912 return found; 2913 } 2914 2915 nmethod* InstanceKlass::lookup_osr_nmethod(const Method* m, int bci, int comp_level, bool match_level) const { 2916 // This is a short non-blocking critical region, so the no safepoint check is ok. 2917 MutexLockerEx ml(OsrList_lock, Mutex::_no_safepoint_check_flag); 2918 nmethod* osr = osr_nmethods_head(); 2919 nmethod* best = NULL; 2920 while (osr != NULL) { 2921 assert(osr->is_osr_method(), "wrong kind of nmethod found in chain"); 2922 // There can be a time when a c1 osr method exists but we are waiting 2923 // for a c2 version. When c2 completes its osr nmethod we will trash 2924 // the c1 version and only be able to find the c2 version. However 2925 // while we overflow in the c1 code at back branches we don't want to 2926 // try and switch to the same code as we are already running 2927 2928 if (osr->method() == m && 2929 (bci == InvocationEntryBci || osr->osr_entry_bci() == bci)) { 2930 if (match_level) { 2931 if (osr->comp_level() == comp_level) { 2932 // Found a match - return it. 2933 return osr; 2934 } 2935 } else { 2936 if (best == NULL || (osr->comp_level() > best->comp_level())) { 2937 if (osr->comp_level() == CompLevel_highest_tier) { 2938 // Found the best possible - return it. 
2939 return osr; 2940 } 2941 best = osr; 2942 } 2943 } 2944 } 2945 osr = osr->osr_link(); 2946 } 2947 if (best != NULL && best->comp_level() >= comp_level && match_level == false) { 2948 return best; 2949 } 2950 return NULL; 2951 } 2952 2953 // ----------------------------------------------------------------------------------------------------- 2954 // Printing 2955 2956 #ifndef PRODUCT 2957 2958 #define BULLET " - " 2959 2960 static const char* state_names[] = { 2961 "allocated", "loaded", "linked", "being_initialized", "fully_initialized", "initialization_error" 2962 }; 2963 2964 static void print_vtable(address self, intptr_t* start, int len, outputStream* st) { 2965 ResourceMark rm; 2966 int* forward_refs = NEW_RESOURCE_ARRAY(int, len); 2967 for (int i = 0; i < len; i++) forward_refs[i] = 0; 2968 for (int i = 0; i < len; i++) { 2969 intptr_t e = start[i]; 2970 st->print("%d : " INTPTR_FORMAT, i, e); 2971 if (forward_refs[i] != 0) { 2972 int from = forward_refs[i]; 2973 int off = (int) start[from]; 2974 st->print(" (offset %d <= [%d])", off, from); 2975 } 2976 if (e != 0 && ((Metadata*)e)->is_metaspace_object()) { 2977 st->print(" "); 2978 ((Metadata*)e)->print_value_on(st); 2979 } else if (self != NULL && e > 0 && e < 0x10000) { 2980 address location = self + e; 2981 int index = (int)((intptr_t*)location - start); 2982 st->print(" (offset %d => [%d])", (int)e, index); 2983 if (index >= 0 && index < len) 2984 forward_refs[index] = i; 2985 } 2986 st->cr(); 2987 } 2988 } 2989 2990 static void print_vtable(vtableEntry* start, int len, outputStream* st) { 2991 return print_vtable(NULL, reinterpret_cast<intptr_t*>(start), len, st); 2992 } 2993 2994 template<typename T> 2995 static void print_array_on(outputStream* st, Array<T>* array) { 2996 if (array == NULL) { st->print_cr("NULL"); return; } 2997 array->print_value_on(st); st->cr(); 2998 if (Verbose || WizardMode) { 2999 for (int i = 0; i < array->length(); i++) { 3000 st->print("%d : ", i); array->at(i)->print_value_on(st); st->cr(); 3001 } 3002 } 3003 } 3004 3005 static void print_array_on(outputStream* st, Array<int>* array) { 3006 if (array == NULL) { st->print_cr("NULL"); return; } 3007 array->print_value_on(st); st->cr(); 3008 if (Verbose || WizardMode) { 3009 for (int i = 0; i < array->length(); i++) { 3010 st->print("%d : %d", i, array->at(i)); st->cr(); 3011 } 3012 } 3013 } 3014 3015 void InstanceKlass::print_on(outputStream* st) const { 3016 assert(is_klass(), "must be klass"); 3017 Klass::print_on(st); 3018 3019 st->print(BULLET"instance size: %d", size_helper()); st->cr(); 3020 st->print(BULLET"klass size: %d", size()); st->cr(); 3021 st->print(BULLET"access: "); access_flags().print_on(st); st->cr(); 3022 st->print(BULLET"misc flags: 0x%x", _misc_flags); st->cr(); 3023 st->print(BULLET"state: "); st->print_cr("%s", state_names[_init_state]); 3024 st->print(BULLET"name: "); name()->print_value_on(st); st->cr(); 3025 st->print(BULLET"super: "); super()->print_value_on_maybe_null(st); st->cr(); 3026 st->print(BULLET"sub: "); 3027 Klass* sub = subklass(); 3028 int n; 3029 for (n = 0; sub != NULL; n++, sub = sub->next_sibling()) { 3030 if (n < MaxSubklassPrintSize) { 3031 sub->print_value_on(st); 3032 st->print(" "); 3033 } 3034 } 3035 if (n >= MaxSubklassPrintSize) st->print("(" INTX_FORMAT " more klasses...)", n - MaxSubklassPrintSize); 3036 st->cr(); 3037 3038 if (is_interface()) { 3039 st->print_cr(BULLET"nof implementors: %d", nof_implementors()); 3040 if (nof_implementors() == 1) { 3041 st->print_cr(BULLET"implementor: "); 
3042 st->print(" "); 3043 implementor()->print_value_on(st); 3044 st->cr(); 3045 } 3046 } 3047 3048 st->print(BULLET"arrays: "); array_klasses()->print_value_on_maybe_null(st); st->cr(); 3049 st->print(BULLET"methods: "); print_array_on(st, methods()); 3050 st->print(BULLET"method ordering: "); print_array_on(st, method_ordering()); 3051 st->print(BULLET"default_methods: "); print_array_on(st, default_methods()); 3052 if (default_vtable_indices() != NULL) { 3053 st->print(BULLET"default vtable indices: "); print_array_on(st, default_vtable_indices()); 3054 } 3055 st->print(BULLET"local interfaces: "); print_array_on(st, local_interfaces()); 3056 st->print(BULLET"trans. interfaces: "); print_array_on(st, transitive_interfaces()); 3057 st->print(BULLET"constants: "); constants()->print_value_on(st); st->cr(); 3058 if (class_loader_data() != NULL) { 3059 st->print(BULLET"class loader data: "); 3060 class_loader_data()->print_value_on(st); 3061 st->cr(); 3062 } 3063 st->print(BULLET"host class: "); host_klass()->print_value_on_maybe_null(st); st->cr(); 3064 if (source_file_name() != NULL) { 3065 st->print(BULLET"source file: "); 3066 source_file_name()->print_value_on(st); 3067 st->cr(); 3068 } 3069 if (source_debug_extension() != NULL) { 3070 st->print(BULLET"source debug extension: "); 3071 st->print("%s", source_debug_extension()); 3072 st->cr(); 3073 } 3074 st->print(BULLET"class annotations: "); class_annotations()->print_value_on(st); st->cr(); 3075 st->print(BULLET"class type annotations: "); class_type_annotations()->print_value_on(st); st->cr(); 3076 st->print(BULLET"field annotations: "); fields_annotations()->print_value_on(st); st->cr(); 3077 st->print(BULLET"field type annotations: "); fields_type_annotations()->print_value_on(st); st->cr(); 3078 { 3079 bool have_pv = false; 3080 // previous versions are linked together through the InstanceKlass 3081 for (InstanceKlass* pv_node = previous_versions(); 3082 pv_node != NULL; 3083 pv_node = pv_node->previous_versions()) { 3084 if (!have_pv) 3085 st->print(BULLET"previous version: "); 3086 have_pv = true; 3087 pv_node->constants()->print_value_on(st); 3088 } 3089 if (have_pv) st->cr(); 3090 } 3091 3092 if (generic_signature() != NULL) { 3093 st->print(BULLET"generic signature: "); 3094 generic_signature()->print_value_on(st); 3095 st->cr(); 3096 } 3097 st->print(BULLET"inner classes: "); inner_classes()->print_value_on(st); st->cr(); 3098 st->print(BULLET"java mirror: "); java_mirror()->print_value_on(st); st->cr(); 3099 st->print(BULLET"vtable length %d (start addr: " INTPTR_FORMAT ")", vtable_length(), p2i(start_of_vtable())); st->cr(); 3100 if (vtable_length() > 0 && (Verbose || WizardMode)) print_vtable(start_of_vtable(), vtable_length(), st); 3101 st->print(BULLET"itable length %d (start addr: " INTPTR_FORMAT ")", itable_length(), p2i(start_of_itable())); st->cr(); 3102 if (itable_length() > 0 && (Verbose || WizardMode)) print_vtable(NULL, start_of_itable(), itable_length(), st); 3103 st->print_cr(BULLET"---- static fields (%d words):", static_field_size()); 3104 FieldPrinter print_static_field(st); 3105 ((InstanceKlass*)this)->do_local_static_fields(&print_static_field); 3106 st->print_cr(BULLET"---- non-static fields (%d words):", nonstatic_field_size()); 3107 FieldPrinter print_nonstatic_field(st); 3108 InstanceKlass* ik = const_cast<InstanceKlass*>(this); 3109 ik->do_nonstatic_fields(&print_nonstatic_field); 3110 3111 st->print(BULLET"non-static oop maps: "); 3112 OopMapBlock* map = start_of_nonstatic_oop_maps(); 3113 
  OopMapBlock* map = start_of_nonstatic_oop_maps();
  OopMapBlock* end_map = map + nonstatic_oop_map_count();
  while (map < end_map) {
    st->print("%d-%d ", map->offset(), map->offset() + heapOopSize*(map->count() - 1));
    map++;
  }
  st->cr();
}

#endif //PRODUCT

void InstanceKlass::print_value_on(outputStream* st) const {
  assert(is_klass(), "must be klass");
  if (Verbose || WizardMode) access_flags().print_on(st);
  name()->print_value_on(st);
}

#ifndef PRODUCT

void FieldPrinter::do_field(fieldDescriptor* fd) {
  _st->print(BULLET);
  if (_obj == NULL) {
    fd->print_on(_st);
    _st->cr();
  } else {
    fd->print_on_for(_st, _obj);
    _st->cr();
  }
}


void InstanceKlass::oop_print_on(oop obj, outputStream* st) {
  Klass::oop_print_on(obj, st);

  if (this == SystemDictionary::String_klass()) {
    typeArrayOop value = java_lang_String::value(obj);
    juint length = java_lang_String::length(obj);
    if (value != NULL &&
        value->is_typeArray() &&
        length <= (juint) value->length()) {
      st->print(BULLET"string: ");
      java_lang_String::print(obj, st);
      st->cr();
      if (!WizardMode) return; // that is enough
    }
  }

  st->print_cr(BULLET"---- fields (total size %d words):", oop_size(obj));
  FieldPrinter print_field(st, obj);
  do_nonstatic_fields(&print_field);

  if (this == SystemDictionary::Class_klass()) {
    st->print(BULLET"signature: ");
    java_lang_Class::print_signature(obj, st);
    st->cr();
    Klass* mirrored_klass = java_lang_Class::as_Klass(obj);
    st->print(BULLET"fake entry for mirror: ");
    mirrored_klass->print_value_on_maybe_null(st);
    st->cr();
    Klass* array_klass = java_lang_Class::array_klass_acquire(obj);
    st->print(BULLET"fake entry for array: ");
    array_klass->print_value_on_maybe_null(st);
    st->cr();
    st->print_cr(BULLET"fake entry for oop_size: %d", java_lang_Class::oop_size(obj));
    st->print_cr(BULLET"fake entry for static_oop_field_count: %d", java_lang_Class::static_oop_field_count(obj));
    Klass* real_klass = java_lang_Class::as_Klass(obj);
    if (real_klass != NULL && real_klass->is_instance_klass()) {
      InstanceKlass::cast(real_klass)->do_local_static_fields(&print_field);
    }
  } else if (this == SystemDictionary::MethodType_klass()) {
    st->print(BULLET"signature: ");
    java_lang_invoke_MethodType::print_signature(obj, st);
    st->cr();
  }
}

#endif //PRODUCT

void InstanceKlass::oop_print_value_on(oop obj, outputStream* st) {
  st->print("a ");
  name()->print_value_on(st);
  obj->print_address_on(st);
  if (this == SystemDictionary::String_klass()
      && java_lang_String::value(obj) != NULL) {
    ResourceMark rm;
    int len = java_lang_String::length(obj);
    int plen = (len < 24 ? len : 12);
    char* str = java_lang_String::as_utf8_string(obj, 0, plen);
    st->print(" = \"%s\"", str);
    if (len > plen)
      st->print("...[%d]", len);
  } else if (this == SystemDictionary::Class_klass()) {
    Klass* k = java_lang_Class::as_Klass(obj);
    st->print(" = ");
    if (k != NULL) {
      k->print_value_on(st);
    } else {
      const char* tname = type2name(java_lang_Class::primitive_type(obj));
      st->print("%s", tname ? tname : "type?");
tname : "type?"); 3211 } 3212 } else if (this == SystemDictionary::MethodType_klass()) { 3213 st->print(" = "); 3214 java_lang_invoke_MethodType::print_signature(obj, st); 3215 } else if (java_lang_boxing_object::is_instance(obj)) { 3216 st->print(" = "); 3217 java_lang_boxing_object::print(obj, st); 3218 } else if (this == SystemDictionary::LambdaForm_klass()) { 3219 oop vmentry = java_lang_invoke_LambdaForm::vmentry(obj); 3220 if (vmentry != NULL) { 3221 st->print(" => "); 3222 vmentry->print_value_on(st); 3223 } 3224 } else if (this == SystemDictionary::MemberName_klass()) { 3225 Metadata* vmtarget = java_lang_invoke_MemberName::vmtarget(obj); 3226 if (vmtarget != NULL) { 3227 st->print(" = "); 3228 vmtarget->print_value_on(st); 3229 } else { 3230 java_lang_invoke_MemberName::clazz(obj)->print_value_on(st); 3231 st->print("."); 3232 java_lang_invoke_MemberName::name(obj)->print_value_on(st); 3233 } 3234 } 3235 } 3236 3237 const char* InstanceKlass::internal_name() const { 3238 return external_name(); 3239 } 3240 3241 bool InstanceKlass::is_declared_value_type(int index) { 3242 assert(constants()->is_within_bounds(index) && 3243 constants()->tag_at(index).is_klass_or_reference(), "Invalid index"); 3244 if (value_types() == NULL) return false; // No ValueType attribute in this class file 3245 return InstanceKlass::is_declared_value_type(value_types(), index); 3246 } 3247 3248 bool InstanceKlass::is_declared_value_type(Array<ValueTypes>* value_types, int index) { 3249 assert(value_types != NULL, "Sanity check"); 3250 for(int i = 0; i < value_types->length(); i++) { 3251 if (value_types->at(i)._class_info_index == index) { 3252 return true; 3253 } 3254 } 3255 return false; 3256 } 3257 3258 bool InstanceKlass::is_declared_value_type(Symbol* symbol) { 3259 if (value_types() == NULL) return false; // No ValueType attribute in this class file 3260 return InstanceKlass::is_declared_value_type(constants(), value_types(), symbol); 3261 } 3262 3263 bool InstanceKlass::is_declared_value_type(ConstantPool* constants, Array<ValueTypes>* value_types, Symbol* symbol) { 3264 assert(symbol != NULL, "Sanity check"); 3265 assert(value_types != NULL, "Sanity check"); 3266 for(int i = 0; i < value_types->length(); i++) { 3267 if (value_types->at(i)._class_name == symbol) { 3268 return true; 3269 } 3270 } 3271 // symbol not found, class name symbol might not have been 3272 // updated yet 3273 for(int i = 0; i < value_types->length(); i++) { 3274 if (constants->klass_at_noresolve((int)value_types->at(i)._class_info_index) == symbol) { 3275 value_types->adr_at(i)->_class_name = symbol; 3276 symbol->increment_refcount(); 3277 return true; 3278 } 3279 } 3280 return false; 3281 } 3282 3283 void InstanceKlass::print_class_load_logging(ClassLoaderData* loader_data, 3284 const char* module_name, 3285 const ClassFileStream* cfs) const { 3286 if (!log_is_enabled(Info, class, load)) { 3287 return; 3288 } 3289 3290 ResourceMark rm; 3291 LogMessage(class, load) msg; 3292 stringStream info_stream; 3293 3294 // Name and class hierarchy info 3295 info_stream.print("%s", external_name()); 3296 3297 // Source 3298 if (cfs != NULL) { 3299 if (cfs->source() != NULL) { 3300 if (module_name != NULL) { 3301 if (ClassLoader::is_modules_image(cfs->source())) { 3302 info_stream.print(" source: jrt:/%s", module_name); 3303 } else { 3304 info_stream.print(" source: %s", cfs->source()); 3305 } 3306 } else { 3307 info_stream.print(" source: %s", cfs->source()); 3308 } 3309 } else if (loader_data == 
      Thread* THREAD = Thread::current();
      Klass* caller =
          THREAD->is_Java_thread()
              ? ((JavaThread*)THREAD)->security_get_caller_class(1)
              : NULL;
      // caller can be NULL, for example, during a JVMTI VM_Init hook
      if (caller != NULL) {
        info_stream.print(" source: instance of %s", caller->external_name());
      } else {
        // source is unknown
      }
    } else {
      oop class_loader = loader_data->class_loader();
      info_stream.print(" source: %s", class_loader->klass()->external_name());
    }
  } else {
    info_stream.print(" source: shared objects file");
  }

  msg.info("%s", info_stream.as_string());

  if (log_is_enabled(Debug, class, load)) {
    stringStream debug_stream;

    // Class hierarchy info
    debug_stream.print(" klass: " INTPTR_FORMAT " super: " INTPTR_FORMAT,
                       p2i(this), p2i(superklass()));

    // Interfaces
    if (local_interfaces() != NULL && local_interfaces()->length() > 0) {
      debug_stream.print(" interfaces:");
      int length = local_interfaces()->length();
      for (int i = 0; i < length; i++) {
        debug_stream.print(" " INTPTR_FORMAT,
                           p2i(InstanceKlass::cast(local_interfaces()->at(i))));
      }
    }

    // Class loader
    debug_stream.print(" loader: [");
    loader_data->print_value_on(&debug_stream);
    debug_stream.print("]");

    // Classfile checksum
    if (cfs) {
      debug_stream.print(" bytes: %d checksum: %08x",
                         cfs->length(),
                         ClassLoader::crc32(0, (const char*)cfs->buffer(),
                                            cfs->length()));
    }

    msg.debug("%s", debug_stream.as_string());
  }
}

#if INCLUDE_SERVICES
// Size Statistics
void InstanceKlass::collect_statistics(KlassSizeStats *sz) const {
  Klass::collect_statistics(sz);

  sz->_inst_size = wordSize * size_helper();
  sz->_vtab_bytes = wordSize * vtable_length();
  sz->_itab_bytes = wordSize * itable_length();
  sz->_nonstatic_oopmap_bytes = wordSize * nonstatic_oop_map_size();

  int n = 0;
  n += (sz->_methods_array_bytes = sz->count_array(methods()));
  n += (sz->_method_ordering_bytes = sz->count_array(method_ordering()));
  n += (sz->_local_interfaces_bytes = sz->count_array(local_interfaces()));
  n += (sz->_transitive_interfaces_bytes = sz->count_array(transitive_interfaces()));
  n += (sz->_fields_bytes = sz->count_array(fields()));
  n += (sz->_inner_classes_bytes = sz->count_array(inner_classes()));
  sz->_ro_bytes += n;

  const ConstantPool* cp = constants();
  if (cp) {
    cp->collect_statistics(sz);
  }

  const Annotations* anno = annotations();
  if (anno) {
    anno->collect_statistics(sz);
  }

  const Array<Method*>* methods_array = methods();
  if (methods()) {
    for (int i = 0; i < methods_array->length(); i++) {
      Method* method = methods_array->at(i);
      if (method) {
        sz->_method_count ++;
        method->collect_statistics(sz);
      }
    }
  }
}
#endif // INCLUDE_SERVICES

// Verification

class VerifyFieldClosure: public OopClosure {
 protected:
  template <class T> void do_oop_work(T* p) {
    oop obj = oopDesc::load_decode_heap_oop(p);
    if (!oopDesc::is_oop_or_null(obj)) {
      tty->print_cr("Failed: " PTR_FORMAT " -> " PTR_FORMAT, p2i(p), p2i(obj));
      Universe::print_on(tty);
      guarantee(false, "boom");
    }
  }
 public:
  virtual void do_oop(oop* p)       { VerifyFieldClosure::do_oop_work(p); }
  virtual void do_oop(narrowOop* p) { VerifyFieldClosure::do_oop_work(p); }
};

void InstanceKlass::verify_on(outputStream* st) {
#ifndef PRODUCT
  // Avoid redundant verifies, this really should be in product.
  if (_verify_count == Universe::verify_count()) return;
  _verify_count = Universe::verify_count();
#endif

  // Verify Klass
  Klass::verify_on(st);

  // Verify that klass is present in ClassLoaderData
  guarantee(class_loader_data()->contains_klass(this),
            "this class isn't found in class loader data");

  // Verify vtables
  if (is_linked()) {
    // $$$ This used to be done only for m/s collections. Doing it
    // always seemed a valid generalization. (DLD -- 6/00)
    vtable().verify(st);
  }

  // Verify first subklass
  if (subklass() != NULL) {
    guarantee(subklass()->is_klass(), "should be klass");
  }

  // Verify siblings
  Klass* super = this->super();
  Klass* sib = next_sibling();
  if (sib != NULL) {
    if (sib == this) {
      fatal("subclass points to itself " PTR_FORMAT, p2i(sib));
    }

    guarantee(sib->is_klass(), "should be klass");
    guarantee(sib->super() == super, "siblings should have same superklass");
  }

  // Verify implementor fields
  Klass* im = implementor();
  if (im != NULL) {
    guarantee(is_interface(), "only interfaces should have implementor set");
    guarantee(im->is_klass(), "should be klass");
    guarantee(!im->is_interface() || im == this,
              "implementors cannot be interfaces");
  }

  // Verify local interfaces
  if (local_interfaces()) {
    Array<Klass*>* local_interfaces = this->local_interfaces();
    for (int j = 0; j < local_interfaces->length(); j++) {
      Klass* e = local_interfaces->at(j);
      guarantee(e->is_klass() && e->is_interface(), "invalid local interface");
    }
  }

  // Verify transitive interfaces
  if (transitive_interfaces() != NULL) {
    Array<Klass*>* transitive_interfaces = this->transitive_interfaces();
    for (int j = 0; j < transitive_interfaces->length(); j++) {
      Klass* e = transitive_interfaces->at(j);
      guarantee(e->is_klass() && e->is_interface(), "invalid transitive interface");
    }
  }

  // Verify methods
  if (methods() != NULL) {
    Array<Method*>* methods = this->methods();
    for (int j = 0; j < methods->length(); j++) {
      guarantee(methods->at(j)->is_method(), "non-method in methods array");
    }
    for (int j = 0; j < methods->length() - 1; j++) {
      Method* m1 = methods->at(j);
      Method* m2 = methods->at(j + 1);
      guarantee(m1->name()->fast_compare(m2->name()) <= 0, "methods not sorted correctly");
    }
  }

  // Verify method ordering
  if (method_ordering() != NULL) {
    Array<int>* method_ordering = this->method_ordering();
    int length = method_ordering->length();
    if (JvmtiExport::can_maintain_original_method_order() ||
        ((UseSharedSpaces || DumpSharedSpaces) && length != 0)) {
      guarantee(length == methods()->length(), "invalid method ordering length");
      jlong sum = 0;
      for (int j = 0; j < length; j++) {
        int original_index = method_ordering->at(j);
        guarantee(original_index >= 0, "invalid method ordering index");
        guarantee(original_index < length, "invalid method ordering index");
        sum += original_index;
      }
      // Verify sum of indices 0,1,...,length-1
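      // Note (explanatory, not from the original source): a valid ordering is a
      // permutation of 0..length-1, so its elements sum to length*(length-1)/2
      // (e.g. length == 4 gives 0+1+2+3 == 6). The guarantee below is only a
      // cheap consistency check, not a full permutation check -- duplicates
      // that happen to preserve the sum would not be caught here.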
      guarantee(sum == ((jlong)length*(length-1))/2, "invalid method ordering sum");
    } else {
      guarantee(length == 0, "invalid method ordering length");
    }
  }

  // Verify default methods
  if (default_methods() != NULL) {
    Array<Method*>* methods = this->default_methods();
    for (int j = 0; j < methods->length(); j++) {
      guarantee(methods->at(j)->is_method(), "non-method in methods array");
    }
    for (int j = 0; j < methods->length() - 1; j++) {
      Method* m1 = methods->at(j);
      Method* m2 = methods->at(j + 1);
      guarantee(m1->name()->fast_compare(m2->name()) <= 0, "methods not sorted correctly");
    }
  }

  // Verify JNI static field identifiers
  if (jni_ids() != NULL) {
    jni_ids()->verify(this);
  }

  // Verify other fields
  if (array_klasses() != NULL) {
    guarantee(array_klasses()->is_klass(), "should be klass");
  }
  if (constants() != NULL) {
    guarantee(constants()->is_constantPool(), "should be constant pool");
  }
  const Klass* host = host_klass();
  if (host != NULL) {
    guarantee(host->is_klass(), "should be klass");
  }
}

void InstanceKlass::oop_verify_on(oop obj, outputStream* st) {
  Klass::oop_verify_on(obj, st);
  VerifyFieldClosure blk;
  obj->oop_iterate_no_header(&blk);
}


// JNIid class for jfieldIDs only
// Note to reviewers:
// These JNI functions are just moved over to column 1 and not changed
// in the compressed oops workspace.
JNIid::JNIid(Klass* holder, int offset, JNIid* next) {
  _holder = holder;
  _offset = offset;
  _next = next;
  debug_only(_is_static_field_id = false;)
}


JNIid* JNIid::find(int offset) {
  JNIid* current = this;
  while (current != NULL) {
    if (current->offset() == offset) return current;
    current = current->next();
  }
  return NULL;
}

void JNIid::deallocate(JNIid* current) {
  while (current != NULL) {
    JNIid* next = current->next();
    delete current;
    current = next;
  }
}


void JNIid::verify(Klass* holder) {
  int first_field_offset = InstanceMirrorKlass::offset_of_static_fields();
  int end_field_offset;
  end_field_offset = first_field_offset + (InstanceKlass::cast(holder)->static_field_size() * wordSize);

  JNIid* current = this;
  while (current != NULL) {
    guarantee(current->holder() == holder, "Invalid klass in JNIid");
#ifdef ASSERT
    int o = current->offset();
    if (current->is_static_field_id()) {
      guarantee(o >= first_field_offset && o < end_field_offset, "Invalid static field offset in JNIid");
    }
#endif
    current = current->next();
  }
}

oop InstanceKlass::klass_holder_phantom() {
  oop* addr;
  if (is_anonymous()) {
    addr = _java_mirror.ptr_raw();
  } else {
    addr = &class_loader_data()->_class_loader;
  }
  return RootAccess<IN_CONCURRENT_ROOT | ON_PHANTOM_OOP_REF>::oop_load(addr);
}

#ifdef ASSERT
void InstanceKlass::set_init_state(ClassState state) {
  bool good_state = is_shared() ? (_init_state <= state)
                                : (_init_state < state);
  assert(good_state || state == allocated, "illegal state transition");
  _init_state = (u1)state;
}
#endif

#if INCLUDE_JVMTI

// RedefineClasses() support for previous versions

// Globally, there is at least one previous version of a class to walk
// during class unloading, which is saved because old methods in the class
// are still running. Otherwise the previous version list is cleaned up.
bool InstanceKlass::_has_previous_versions = false;

// Returns true if there are previous versions of a class for class
// unloading only. Also resets the flag to false. purge_previous_version_list()
// will set the flag to true if there are any left, i.e., if there's any
// work to do for next time. This is to avoid the expensive code cache
// walk in CLDG::do_unloading().
bool InstanceKlass::has_previous_versions_and_reset() {
  bool ret = _has_previous_versions;
  log_trace(redefine, class, iklass, purge)("Class unloading: has_previous_versions = %s",
     ret ? "true" : "false");
  _has_previous_versions = false;
  return ret;
}

// Purge previous versions before adding new previous versions of the class and
// during class unloading.
void InstanceKlass::purge_previous_version_list() {
  assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
  assert(has_been_redefined(), "Should only be called for main class");

  // Quick exit.
  if (previous_versions() == NULL) {
    return;
  }

  // This klass has previous versions so see what we can cleanup
  // while it is safe to do so.

  int deleted_count = 0;    // leave debugging breadcrumbs
  int live_count = 0;
  ClassLoaderData* loader_data = class_loader_data();
  assert(loader_data != NULL, "should never be null");

  ResourceMark rm;
  log_trace(redefine, class, iklass, purge)("%s: previous versions", external_name());

  // previous versions are linked together through the InstanceKlass
  InstanceKlass* pv_node = previous_versions();
  InstanceKlass* last = this;
  int version = 0;

  // check the previous versions list
  for (; pv_node != NULL; ) {

    ConstantPool* pvcp = pv_node->constants();
    assert(pvcp != NULL, "cp ref was unexpectedly cleared");

    if (!pvcp->on_stack()) {
      // If the constant pool isn't on stack, none of the methods
      // are executing. Unlink this previous_version.
      // The previous version InstanceKlass is on the ClassLoaderData deallocate list
      // so will be deallocated during the next phase of class unloading.
      log_trace(redefine, class, iklass, purge)
        ("previous version " INTPTR_FORMAT " is dead.", p2i(pv_node));
      // For debugging purposes.
      pv_node->set_is_scratch_class();
      // Unlink from previous version list.
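      // Sketch of the unlink below (explanatory, not from the original source):
      // given the list  last -> pv_node -> next, the two link_previous_versions()
      // calls splice pv_node out so that  last -> next  remains; pv_node itself
      // is only queued on the ClassLoaderData deallocate list, not freed here.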
      assert(pv_node->class_loader_data() == loader_data, "wrong loader_data");
      InstanceKlass* next = pv_node->previous_versions();
      pv_node->link_previous_versions(NULL);   // point next to NULL
      last->link_previous_versions(next);
      // Add to the deallocate list after unlinking
      loader_data->add_to_deallocate_list(pv_node);
      pv_node = next;
      deleted_count++;
      version++;
      continue;
    } else {
      log_trace(redefine, class, iklass, purge)("previous version " INTPTR_FORMAT " is alive", p2i(pv_node));
      assert(pvcp->pool_holder() != NULL, "Constant pool with no holder");
      guarantee (!loader_data->is_unloading(), "unloaded classes can't be on the stack");
      live_count++;
      // found a previous version for next time we do class unloading
      _has_previous_versions = true;
    }

    // At least one method is live in this previous version.
    // Reset dead EMCP methods not to get breakpoints.
    // All methods are deallocated when all of the methods for this class are no
    // longer running.
    Array<Method*>* method_refs = pv_node->methods();
    if (method_refs != NULL) {
      log_trace(redefine, class, iklass, purge)("previous methods length=%d", method_refs->length());
      for (int j = 0; j < method_refs->length(); j++) {
        Method* method = method_refs->at(j);

        if (!method->on_stack()) {
          // no breakpoints for non-running methods
          if (method->is_running_emcp()) {
            method->set_running_emcp(false);
          }
        } else {
          assert (method->is_obsolete() || method->is_running_emcp(),
                  "emcp method cannot run after emcp bit is cleared");
          log_trace(redefine, class, iklass, purge)
            ("purge: %s(%s): prev method @%d in version @%d is alive",
             method->name()->as_C_string(), method->signature()->as_C_string(), j, version);
        }
      }
    }
    // next previous version
    last = pv_node;
    pv_node = pv_node->previous_versions();
    version++;
  }
  log_trace(redefine, class, iklass, purge)
    ("previous version stats: live=%d, deleted=%d", live_count, deleted_count);
}

void InstanceKlass::mark_newly_obsolete_methods(Array<Method*>* old_methods,
                                                int emcp_method_count) {
  int obsolete_method_count = old_methods->length() - emcp_method_count;

  if (emcp_method_count != 0 && obsolete_method_count != 0 &&
      _previous_versions != NULL) {
    // We have a mix of obsolete and EMCP methods so we have to
    // clear out any matching EMCP method entries the hard way.
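    // Explanatory note (not from the original source): EMCP stands for
    // "equivalent modulo constant pool", i.e. a redefined method whose
    // bytecodes did not change. The nested loops below take each newly
    // obsolete method and walk every previous version of this class,
    // marking any still-EMCP method with the same name and signature as
    // obsolete as well.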
    int local_count = 0;
    for (int i = 0; i < old_methods->length(); i++) {
      Method* old_method = old_methods->at(i);
      if (old_method->is_obsolete()) {
        // only obsolete methods are interesting
        Symbol* m_name = old_method->name();
        Symbol* m_signature = old_method->signature();

        // previous versions are linked together through the InstanceKlass
        int j = 0;
        for (InstanceKlass* prev_version = _previous_versions;
             prev_version != NULL;
             prev_version = prev_version->previous_versions(), j++) {

          Array<Method*>* method_refs = prev_version->methods();
          for (int k = 0; k < method_refs->length(); k++) {
            Method* method = method_refs->at(k);

            if (!method->is_obsolete() &&
                method->name() == m_name &&
                method->signature() == m_signature) {
              // The current RedefineClasses() call has made all EMCP
              // versions of this method obsolete so mark it as obsolete
              log_trace(redefine, class, iklass, add)
                ("%s(%s): flush obsolete method @%d in version @%d",
                 m_name->as_C_string(), m_signature->as_C_string(), k, j);

              method->set_is_obsolete();
              break;
            }
          }

          // The previous loop may not find a matching EMCP method, but
          // that doesn't mean that we can optimize and not go any
          // further back in the PreviousVersion generations. The EMCP
          // method for this generation could have already been made obsolete,
          // but there still may be an older EMCP method that has not
          // been made obsolete.
        }

        if (++local_count >= obsolete_method_count) {
          // no more obsolete methods so bail out now
          break;
        }
      }
    }
  }
}

// Save the scratch_class as the previous version if any of the methods are running.
// The previous_versions are used to set breakpoints in EMCP methods and they are
// also used to clean MethodData links to redefined methods that are no longer running.
void InstanceKlass::add_previous_version(InstanceKlass* scratch_class,
                                         int emcp_method_count) {
  assert(Thread::current()->is_VM_thread(),
         "only VMThread can add previous versions");

  ResourceMark rm;
  log_trace(redefine, class, iklass, add)
    ("adding previous version ref for %s, EMCP_cnt=%d", scratch_class->external_name(), emcp_method_count);

  // Clean out old previous versions for this class
  purge_previous_version_list();

  // Mark newly obsolete methods in remaining previous versions. An EMCP method from
  // a previous redefinition may be made obsolete by this redefinition.
  Array<Method*>* old_methods = scratch_class->methods();
  mark_newly_obsolete_methods(old_methods, emcp_method_count);

  // If the constant pool for this previous version of the class
  // is not marked as being on the stack, then none of the methods
  // in this previous version of the class are on the stack so
  // we don't need to add this as a previous version.
  ConstantPool* cp_ref = scratch_class->constants();
  if (!cp_ref->on_stack()) {
    log_trace(redefine, class, iklass, add)("scratch class not added; no methods are running");
    // For debugging purposes.
    scratch_class->set_is_scratch_class();
    scratch_class->class_loader_data()->add_to_deallocate_list(scratch_class);
    return;
  }

  if (emcp_method_count != 0) {
    // At least one method is still running, check for EMCP methods
    for (int i = 0; i < old_methods->length(); i++) {
      Method* old_method = old_methods->at(i);
      if (!old_method->is_obsolete() && old_method->on_stack()) {
        // if EMCP method (not obsolete) is on the stack, mark as EMCP so that
        // we can add breakpoints for it.

        // We set the method->on_stack bit during safepoints for class redefinition
        // and use this bit to set the is_running_emcp bit.
        // After the safepoint, the on_stack bit is cleared and the running emcp
        // method may exit. If so, we would set a breakpoint in a method that
        // is never reached, but this won't be noticeable to the programmer.
        old_method->set_running_emcp(true);
        log_trace(redefine, class, iklass, add)
          ("EMCP method %s is on_stack " INTPTR_FORMAT, old_method->name_and_sig_as_C_string(), p2i(old_method));
      } else if (!old_method->is_obsolete()) {
        log_trace(redefine, class, iklass, add)
          ("EMCP method %s is NOT on_stack " INTPTR_FORMAT, old_method->name_and_sig_as_C_string(), p2i(old_method));
      }
    }
  }

  // Add previous version if any methods are still running.
  // Set has_previous_version flag for processing during class unloading.
  _has_previous_versions = true;
  log_trace(redefine, class, iklass, add) ("scratch class added; one of its methods is on_stack.");
  assert(scratch_class->previous_versions() == NULL, "shouldn't have a previous version");
  scratch_class->link_previous_versions(previous_versions());
  link_previous_versions(scratch_class);
} // end add_previous_version()

#endif // INCLUDE_JVMTI

Method* InstanceKlass::method_with_idnum(int idnum) {
  Method* m = NULL;
  if (idnum < methods()->length()) {
    m = methods()->at(idnum);
  }
  if (m == NULL || m->method_idnum() != idnum) {
    for (int index = 0; index < methods()->length(); ++index) {
      m = methods()->at(index);
      if (m->method_idnum() == idnum) {
        return m;
      }
    }
    // None found, return null for the caller to handle.
    return NULL;
  }
  return m;
}


Method* InstanceKlass::method_with_orig_idnum(int idnum) {
  if (idnum >= methods()->length()) {
    return NULL;
  }
  Method* m = methods()->at(idnum);
  if (m != NULL && m->orig_method_idnum() == idnum) {
    return m;
  }
  // Obsolete method idnum does not match the original idnum
  for (int index = 0; index < methods()->length(); ++index) {
    m = methods()->at(index);
    if (m->orig_method_idnum() == idnum) {
      return m;
    }
  }
  // None found, return null for the caller to handle.
  return NULL;
}


Method* InstanceKlass::method_with_orig_idnum(int idnum, int version) {
  InstanceKlass* holder = get_klass_version(version);
  if (holder == NULL) {
    return NULL; // The version of klass is gone, no method is found
  }
  Method* method = holder->method_with_orig_idnum(idnum);
  return method;
}

#if INCLUDE_JVMTI
JvmtiCachedClassFileData* InstanceKlass::get_cached_class_file() {
  if (MetaspaceShared::is_in_shared_metaspace(_cached_class_file)) {
    // Ignore the archived class stream data
    return NULL;
  } else {
    return _cached_class_file;
  }
}

jint InstanceKlass::get_cached_class_file_len() {
  return VM_RedefineClasses::get_cached_class_file_len(_cached_class_file);
}

unsigned char * InstanceKlass::get_cached_class_file_bytes() {
  return VM_RedefineClasses::get_cached_class_file_bytes(_cached_class_file);
}

#if INCLUDE_CDS
JvmtiCachedClassFileData* InstanceKlass::get_archived_class_data() {
  if (DumpSharedSpaces) {
    return _cached_class_file;
  } else {
    assert(this->is_shared(), "class should be shared");
    if (MetaspaceShared::is_in_shared_metaspace(_cached_class_file)) {
      return _cached_class_file;
    } else {
      return NULL;
    }
  }
}
#endif
#endif

#define THROW_DVT_ERROR(s) \
  Exceptions::fthrow(THREAD_AND_LOCATION, vmSymbols::java_lang_IncompatibleClassChangeError(), \
      "ValueCapableClass class '%s' %s", external_name(),(s)); \
      return
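// Usage sketch (illustrative, not from the original source): inside an
// InstanceKlass member function that has THREAD in scope, a failed
// ValueCapableClass check could be reported as
//   THROW_DVT_ERROR("is not well formed");
// where the message text is hypothetical; the macro formats it into the
// IncompatibleClassChangeError and then returns from the calling function.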