/*
 * Copyright (c) 1997, 2018, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "jvm.h"
#include "aot/aotLoader.hpp"
#include "classfile/classFileParser.hpp"
#include "classfile/classFileStream.hpp"
#include "classfile/classLoader.hpp"
#include "classfile/javaClasses.hpp"
#include "classfile/moduleEntry.hpp"
#include "classfile/systemDictionary.hpp"
#include "classfile/systemDictionaryShared.hpp"
#include "classfile/verifier.hpp"
#include "classfile/vmSymbols.hpp"
#include "code/dependencyContext.hpp"
#include "compiler/compileBroker.hpp"
#include "gc/shared/collectedHeap.inline.hpp"
#include "gc/shared/specialized_oop_closures.hpp"
#include "interpreter/oopMapCache.hpp"
#include "interpreter/rewriter.hpp"
#include "jvmtifiles/jvmti.h"
#include "logging/log.hpp"
#include "logging/logMessage.hpp"
#include "logging/logStream.hpp"
#include "memory/heapInspection.hpp"
#include "memory/iterator.inline.hpp"
#include "memory/metadataFactory.hpp"
#include "memory/metaspaceClosure.hpp"
#include "memory/metaspaceShared.hpp"
#include "memory/oopFactory.hpp"
#include "memory/resourceArea.hpp"
#include "oops/fieldStreams.hpp"
#include "oops/instanceClassLoaderKlass.hpp"
#include "oops/instanceKlass.inline.hpp"
#include "oops/instanceMirrorKlass.hpp"
#include "oops/instanceOop.hpp"
#include "oops/klass.inline.hpp"
#include "oops/method.hpp"
#include "oops/oop.inline.hpp"
#include "oops/symbol.hpp"
#include "oops/valueKlass.hpp"
#include "prims/jvmtiExport.hpp"
#include "prims/jvmtiRedefineClasses.hpp"
#include "prims/jvmtiThreadState.hpp"
#include "prims/methodComparator.hpp"
#include "runtime/atomic.hpp"
#include "runtime/fieldDescriptor.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/javaCalls.hpp"
#include "runtime/mutexLocker.hpp"
#include "runtime/orderAccess.inline.hpp"
#include "runtime/thread.inline.hpp"
#include "services/classLoadingService.hpp"
#include "services/threadService.hpp"
#include "utilities/dtrace.hpp"
#include "utilities/macros.hpp"
#include "utilities/stringUtils.hpp"
#ifdef COMPILER1
#include "c1/c1_Compiler.hpp"
#endif

#ifdef DTRACE_ENABLED


#define HOTSPOT_CLASS_INITIALIZATION_required HOTSPOT_CLASS_INITIALIZATION_REQUIRED
#define HOTSPOT_CLASS_INITIALIZATION_recursive HOTSPOT_CLASS_INITIALIZATION_RECURSIVE
#define HOTSPOT_CLASS_INITIALIZATION_concurrent HOTSPOT_CLASS_INITIALIZATION_CONCURRENT
#define HOTSPOT_CLASS_INITIALIZATION_erroneous HOTSPOT_CLASS_INITIALIZATION_ERRONEOUS
#define HOTSPOT_CLASS_INITIALIZATION_super__failed HOTSPOT_CLASS_INITIALIZATION_SUPER_FAILED
#define HOTSPOT_CLASS_INITIALIZATION_clinit HOTSPOT_CLASS_INITIALIZATION_CLINIT
#define HOTSPOT_CLASS_INITIALIZATION_error HOTSPOT_CLASS_INITIALIZATION_ERROR
#define HOTSPOT_CLASS_INITIALIZATION_end HOTSPOT_CLASS_INITIALIZATION_END
#define DTRACE_CLASSINIT_PROBE(type, thread_type)              \
  {                                                            \
    char* data = NULL;                                         \
    int len = 0;                                               \
    Symbol* clss_name = name();                                \
    if (clss_name != NULL) {                                   \
      data = (char*)clss_name->bytes();                        \
      len = clss_name->utf8_length();                          \
    }                                                          \
    HOTSPOT_CLASS_INITIALIZATION_##type(                       \
      data, len, (void*)class_loader(), thread_type);          \
  }

#define DTRACE_CLASSINIT_PROBE_WAIT(type, thread_type, wait)   \
  {                                                            \
    char* data = NULL;                                         \
    int len = 0;                                               \
    Symbol* clss_name = name();                                \
    if (clss_name != NULL) {                                   \
      data = (char*)clss_name->bytes();                        \
      len = clss_name->utf8_length();                          \
    }                                                          \
    HOTSPOT_CLASS_INITIALIZATION_##type(                       \
      data, len, (void*)class_loader(), thread_type, wait);    \
  }

#else // ndef DTRACE_ENABLED

#define DTRACE_CLASSINIT_PROBE(type, thread_type)
#define DTRACE_CLASSINIT_PROBE_WAIT(type, thread_type, wait)

#endif // ndef DTRACE_ENABLED

static inline bool is_class_loader(const Symbol* class_name,
                                   const ClassFileParser& parser) {
  assert(class_name != NULL, "invariant");

  if (class_name == vmSymbols::java_lang_ClassLoader()) {
    return true;
  }

  if (SystemDictionary::ClassLoader_klass_loaded()) {
    const Klass* const super_klass = parser.super_klass();
    if (super_klass != NULL) {
      if (super_klass->is_subtype_of(SystemDictionary::ClassLoader_klass())) {
        return true;
      }
    }
  }
  return false;
}

InstanceKlass* InstanceKlass::allocate_instance_klass(const ClassFileParser& parser, TRAPS) {
  const int size = InstanceKlass::size(parser.vtable_size(),
                                       parser.itable_size(),
                                       nonstatic_oop_map_size(parser.total_oop_map_count()),
                                       parser.is_interface(),
                                       parser.is_anonymous(),
                                       should_store_fingerprint(parser.is_anonymous()),
                                       parser.has_flattenable_fields() ? parser.java_fields_count() : 0,
                                       parser.is_value_type());

  const Symbol* const class_name = parser.class_name();
  assert(class_name != NULL, "invariant");
  ClassLoaderData* loader_data = parser.loader_data();
  assert(loader_data != NULL, "invariant");

  InstanceKlass* ik;

  // Allocation
  if (REF_NONE == parser.reference_type()) {
    if (class_name == vmSymbols::java_lang_Class()) {
      // mirror
      ik = new (loader_data, size, THREAD) InstanceMirrorKlass(parser);
    } else if (is_class_loader(class_name, parser)) {
      // class loader
      ik = new (loader_data, size, THREAD) InstanceClassLoaderKlass(parser);
    } else if (parser.is_value_type()) {
      // value type
      ik = new (loader_data, size, THREAD) ValueKlass(parser);
    } else {
      // normal
      ik = new (loader_data, size, THREAD) InstanceKlass(parser, InstanceKlass::_misc_kind_other);
    }
  } else {
    // reference
    ik = new (loader_data, size, THREAD) InstanceRefKlass(parser);
  }

  // Check for pending exception before adding to the loader data and incrementing
  // class count. Can get OOM here.
  if (HAS_PENDING_EXCEPTION) {
    return NULL;
  }

  assert(ik != NULL, "invariant");

  const bool publicize = !parser.is_internal();
#ifdef ASSERT
  assert(ik->size() == size, "");
  ik->bounds_check((address) ik->start_of_vtable(), false, size);
  ik->bounds_check((address) ik->start_of_itable(), false, size);
  ik->bounds_check((address) ik->end_of_itable(), true, size);
  ik->bounds_check((address) ik->end_of_nonstatic_oop_maps(), true, size);
#endif //ASSERT

  // Add all classes to our internal class loader list here,
  // including classes in the bootstrap (NULL) class loader.
  loader_data->add_class(ik, publicize);
  return ik;
}

#ifndef PRODUCT
bool InstanceKlass::bounds_check(address addr, bool edge_ok, intptr_t size_in_bytes) const {
  const char* bad = NULL;
  address end = NULL;
  if (addr < (address)this) {
    bad = "before";
  } else if (addr == (address)this) {
    if (edge_ok) return true;
    bad = "just before";
  } else if (addr == (end = (address)this + sizeof(intptr_t) * (size_in_bytes < 0 ? size() : size_in_bytes))) {
    if (edge_ok) return true;
    bad = "just after";
  } else if (addr > end) {
    bad = "after";
  } else {
    return true;
  }
  tty->print_cr("%s object bounds: " INTPTR_FORMAT " [" INTPTR_FORMAT ".." INTPTR_FORMAT "]",
                bad, (intptr_t)addr, (intptr_t)this, (intptr_t)end);
  Verbose = WizardMode = true; this->print(); //@@
  return false;
}
#endif //PRODUCT

// copy method ordering from resource area to Metaspace
void InstanceKlass::copy_method_ordering(const intArray* m, TRAPS) {
  if (m != NULL) {
    // allocate a new array and copy contents (memcpy?)
    _method_ordering = MetadataFactory::new_array<int>(class_loader_data(), m->length(), CHECK);
    for (int i = 0; i < m->length(); i++) {
      _method_ordering->at_put(i, m->at(i));
    }
  } else {
    _method_ordering = Universe::the_empty_int_array();
  }
}

// create a new array of vtable_indices for default methods
Array<int>* InstanceKlass::create_new_default_vtable_indices(int len, TRAPS) {
  Array<int>* vtable_indices = MetadataFactory::new_array<int>(class_loader_data(), len, CHECK_NULL);
  assert(default_vtable_indices() == NULL, "only create once");
  set_default_vtable_indices(vtable_indices);
  return vtable_indices;
}

InstanceKlass::InstanceKlass(const ClassFileParser& parser, unsigned kind) :
  _static_field_size(parser.static_field_size()),
  _nonstatic_oop_map_size(nonstatic_oop_map_size(parser.total_oop_map_count())),
  _itable_len(parser.itable_size()),
  _reference_type(parser.reference_type()),
  _extra_flags(0),
  _adr_valueklass_fixed_block(NULL) {
  set_vtable_length(parser.vtable_size());
  set_kind(kind);
  set_access_flags(parser.access_flags());
  set_is_anonymous(parser.is_anonymous());
  set_layout_helper(Klass::instance_layout_helper(parser.layout_size(),
                                                  false));
  if (parser.has_flattenable_fields()) {
    set_has_value_fields();
  }
  _java_fields_count = parser.java_fields_count();

  assert(NULL == _methods, "underlying memory not zeroed?");
  assert(is_instance_klass(), "is layout incorrect?");
  assert(size_helper() == parser.layout_size(), "incorrect size_helper?");
}

void InstanceKlass::deallocate_methods(ClassLoaderData* loader_data,
                                       Array<Method*>* methods) {
  if (methods != NULL && methods != Universe::the_empty_method_array() &&
      !methods->is_shared()) {
    for (int i = 0; i < methods->length(); i++) {
      Method* method = methods->at(i);
      if (method == NULL) continue; // maybe null if error processing
      // Only want to delete methods that are not executing for RedefineClasses.
      // The previous version will point to them so they're not totally dangling
      assert (!method->on_stack(), "shouldn't be called with methods on stack");
      MetadataFactory::free_metadata(loader_data, method);
    }
    MetadataFactory::free_array<Method*>(loader_data, methods);
  }
}

void InstanceKlass::deallocate_interfaces(ClassLoaderData* loader_data,
                                          const Klass* super_klass,
                                          Array<Klass*>* local_interfaces,
                                          Array<Klass*>* transitive_interfaces) {
  // Only deallocate transitive interfaces if not empty, same as super class
  // or same as local interfaces. See code in parseClassFile.
  Array<Klass*>* ti = transitive_interfaces;
  if (ti != Universe::the_empty_klass_array() && ti != local_interfaces) {
    // check that the interfaces don't come from super class
    Array<Klass*>* sti = (super_klass == NULL) ? NULL :
                         InstanceKlass::cast(super_klass)->transitive_interfaces();
    if (ti != sti && ti != NULL && !ti->is_shared()) {
      MetadataFactory::free_array<Klass*>(loader_data, ti);
    }
  }

  // local interfaces can be empty
  if (local_interfaces != Universe::the_empty_klass_array() &&
      local_interfaces != NULL && !local_interfaces->is_shared()) {
    MetadataFactory::free_array<Klass*>(loader_data, local_interfaces);
  }
}

// This function deallocates the metadata and C heap pointers that the
// InstanceKlass points to.
void InstanceKlass::deallocate_contents(ClassLoaderData* loader_data) {

  // Orphan the mirror first, CMS thinks it's still live.
  if (java_mirror() != NULL) {
    java_lang_Class::set_klass(java_mirror(), NULL);
  }

  // Also remove mirror from handles
  loader_data->remove_handle(_java_mirror);

  // Need to take this class off the class loader data list.
  loader_data->remove_class(this);

  // The array_klass for this class is created later, after error handling.
  // For class redefinition, we keep the original class so this scratch class
  // doesn't have an array class. Either way, assert that there is nothing
  // to deallocate.
  assert(array_klasses() == NULL, "array classes shouldn't be created for this class yet");

  // Release C heap allocated data that this might point to, which includes
  // reference counting symbol names.
  release_C_heap_structures();

  deallocate_methods(loader_data, methods());
  set_methods(NULL);

  if (method_ordering() != NULL &&
      method_ordering() != Universe::the_empty_int_array() &&
      !method_ordering()->is_shared()) {
    MetadataFactory::free_array<int>(loader_data, method_ordering());
  }
  set_method_ordering(NULL);

  // default methods can be empty
  if (default_methods() != NULL &&
      default_methods() != Universe::the_empty_method_array() &&
      !default_methods()->is_shared()) {
    MetadataFactory::free_array<Method*>(loader_data, default_methods());
  }
  // Do NOT deallocate the default methods, they are owned by superinterfaces.
  set_default_methods(NULL);

  // default methods vtable indices can be empty
  if (default_vtable_indices() != NULL &&
      !default_vtable_indices()->is_shared()) {
    MetadataFactory::free_array<int>(loader_data, default_vtable_indices());
  }
  set_default_vtable_indices(NULL);


  // This array is in Klass, but remove it with the InstanceKlass since
  // this place would be the only caller and it can share memory with transitive
  // interfaces.
  if (secondary_supers() != NULL &&
      secondary_supers() != Universe::the_empty_klass_array() &&
      secondary_supers() != transitive_interfaces() &&
      !secondary_supers()->is_shared()) {
    MetadataFactory::free_array<Klass*>(loader_data, secondary_supers());
  }
  set_secondary_supers(NULL);

  deallocate_interfaces(loader_data, super(), local_interfaces(), transitive_interfaces());
  set_transitive_interfaces(NULL);
  set_local_interfaces(NULL);

  if (fields() != NULL && !fields()->is_shared()) {
    MetadataFactory::free_array<jushort>(loader_data, fields());
  }
  set_fields(NULL, 0);

  // If a method from a redefined class is using this constant pool, don't
  // delete it, yet. The new class's previous version will point to this.
  if (constants() != NULL) {
    assert (!constants()->on_stack(), "shouldn't be called if anything is onstack");
    if (!constants()->is_shared()) {
      MetadataFactory::free_metadata(loader_data, constants());
    }
    // Delete any cached resolution errors for the constant pool
    SystemDictionary::delete_resolution_error(constants());

    set_constants(NULL);
  }

  if (inner_classes() != NULL &&
      inner_classes() != Universe::the_empty_short_array() &&
      !inner_classes()->is_shared()) {
    MetadataFactory::free_array<jushort>(loader_data, inner_classes());
  }
  set_inner_classes(NULL);

  if (value_types() != NULL && !value_types()->is_shared()) {
    MetadataFactory::free_array<ValueTypes>(loader_data, value_types());
  }
  set_value_types(NULL);

  // We should deallocate the Annotations instance if it's not in shared spaces.
  if (annotations() != NULL && !annotations()->is_shared()) {
    MetadataFactory::free_metadata(loader_data, annotations());
  }
  set_annotations(NULL);
}

bool InstanceKlass::should_be_initialized() const {
  return !is_initialized();
}

klassItable InstanceKlass::itable() const {
  return klassItable(const_cast<InstanceKlass*>(this));
}

void InstanceKlass::eager_initialize(Thread *thread) {
  if (!EagerInitialization) return;

  if (this->is_not_initialized()) {
    // abort if the class has a class initializer
    if (this->class_initializer() != NULL) return;

    // abort if it is java.lang.Object (initialization is handled in genesis)
    Klass* super_klass = super();
    if (super_klass == NULL) return;

    // abort if the super class should be initialized
    if (!InstanceKlass::cast(super_klass)->is_initialized()) return;

    // call body to expose the this pointer
    eager_initialize_impl();
  }
}

// JVMTI spec thinks there are signers and protection domain in the
// instanceKlass. These accessors pretend these fields are there.
// The hprof specification also thinks these fields are in InstanceKlass.
oop InstanceKlass::protection_domain() const {
  // return the protection_domain from the mirror
  return java_lang_Class::protection_domain(java_mirror());
}

// Removing these would require an incompatible change and a CCC request.
objArrayOop InstanceKlass::signers() const {
  // return the signers from the mirror
  return java_lang_Class::signers(java_mirror());
}

oop InstanceKlass::init_lock() const {
  // return the init lock from the mirror
  oop lock = java_lang_Class::init_lock(java_mirror());
  // Prevent reordering with any access of initialization state
  OrderAccess::loadload();
  assert((oop)lock != NULL || !is_not_initialized(), // initialized or in_error state
         "only fully initialized state can have a null lock");
  return lock;
}

// Set the initialization lock to null so the object can be GC'ed. Any threads
// racing to get this lock will see a null lock and will not lock.
// That's okay because they all check for initialized state after getting
// the lock and return.
void InstanceKlass::fence_and_clear_init_lock() {
  // make sure previous stores are all done, notably the init_state.
  OrderAccess::storestore();
  java_lang_Class::set_init_lock(java_mirror(), NULL);
  assert(!is_not_initialized(), "class must be initialized now");
}

void InstanceKlass::eager_initialize_impl() {
  EXCEPTION_MARK;
  HandleMark hm(THREAD);
  Handle h_init_lock(THREAD, init_lock());
  ObjectLocker ol(h_init_lock, THREAD, h_init_lock() != NULL);

  // abort if someone beat us to the initialization
  if (!is_not_initialized()) return;  // note: not equivalent to is_initialized()

  ClassState old_state = init_state();
  link_class_impl(true, THREAD);
  if (HAS_PENDING_EXCEPTION) {
    CLEAR_PENDING_EXCEPTION;
    // Abort if linking the class throws an exception.

    // Use a test to avoid redundantly resetting the state if there's
    // no change. set_init_state() asserts that state changes make
    // progress, whereas here we might just be spinning in place.
    if (old_state != _init_state)
      set_init_state(old_state);
  } else {
    // linking successful, mark class as initialized
    set_init_state(fully_initialized);
    fence_and_clear_init_lock();
    // trace
    if (log_is_enabled(Info, class, init)) {
      ResourceMark rm(THREAD);
      log_info(class, init)("[Initialized %s without side effects]", external_name());
    }
  }
}


// See "The Virtual Machine Specification" section 2.16.5 for a detailed explanation of the class initialization
// process. The step comments refer to the procedure described in that section.
// Note: implementation moved to static method to expose the this pointer.
void InstanceKlass::initialize(TRAPS) {
  if (this->should_be_initialized()) {
    initialize_impl(CHECK);
    // Note: at this point the class may be initialized
    //       OR it may be in the state of being initialized
    //       in case of recursive initialization!
  } else {
    assert(is_initialized(), "sanity check");
  }
}


bool InstanceKlass::verify_code(bool throw_verifyerror, TRAPS) {
  // 1) Verify the bytecodes
  Verifier::Mode mode =
    throw_verifyerror ?
      Verifier::ThrowException : Verifier::NoException;
  return Verifier::verify(this, mode, should_verify_class(), THREAD);
}


// Used exclusively by the shared spaces dump mechanism to prevent
// classes mapped into the shared regions in new VMs from appearing linked.

void InstanceKlass::unlink_class() {
  assert(is_linked(), "must be linked");
  _init_state = loaded;
}

void InstanceKlass::link_class(TRAPS) {
  assert(is_loaded(), "must be loaded");
  if (!is_linked()) {
    link_class_impl(true, CHECK);
  }
}

// Called to verify that a class can link during initialization, without
// throwing a VerifyError.
bool InstanceKlass::link_class_or_fail(TRAPS) {
  assert(is_loaded(), "must be loaded");
  if (!is_linked()) {
    link_class_impl(false, CHECK_false);
  }
  return is_linked();
}

bool InstanceKlass::link_class_impl(bool throw_verifyerror, TRAPS) {
  if (DumpSharedSpaces && is_in_error_state()) {
    // This is for CDS dumping phase only -- we use the in_error_state to indicate that
    // the class has failed verification. Throwing the NoClassDefFoundError here is just
    // a convenient way to stop repeat attempts to verify the same (bad) class.
    //
    // Note that the NoClassDefFoundError is not part of the JLS, and should not be thrown
    // if we are executing Java code. This is not a problem for CDS dumping phase since
    // it doesn't execute any Java code.
    ResourceMark rm(THREAD);
    Exceptions::fthrow(THREAD_AND_LOCATION,
                       vmSymbols::java_lang_NoClassDefFoundError(),
                       "Class %s, or one of its supertypes, failed class initialization",
                       external_name());
    return false;
  }
  // return if already verified
  if (is_linked()) {
    return true;
  }

  // Timing
  // timer handles recursion
  assert(THREAD->is_Java_thread(), "non-JavaThread in link_class_impl");
  JavaThread* jt = (JavaThread*)THREAD;

  // link super class before linking this class
  Klass* super_klass = super();
  if (super_klass != NULL) {
    if (super_klass->is_interface()) {  // check if super class is an interface
      ResourceMark rm(THREAD);
      Exceptions::fthrow(
        THREAD_AND_LOCATION,
        vmSymbols::java_lang_IncompatibleClassChangeError(),
        "class %s has interface %s as super class",
        external_name(),
        super_klass->external_name()
      );
      return false;
    }

    InstanceKlass* ik_super = InstanceKlass::cast(super_klass);
    ik_super->link_class_impl(throw_verifyerror, CHECK_false);
  }

  // link all interfaces implemented by this class before linking this class
  Array<Klass*>* interfaces = local_interfaces();
  int num_interfaces = interfaces->length();
  for (int index = 0; index < num_interfaces; index++) {
    InstanceKlass* interk = InstanceKlass::cast(interfaces->at(index));
    interk->link_class_impl(throw_verifyerror, CHECK_false);
  }


  // If a class declares a method that uses a value class as an argument
  // type or return value type, this value class must be loaded during the
  // linking of this class because size and properties of the value class
  // must be known in order to be able to perform value type optimizations.
  // The implementation below is an approximation of this rule, the code
  // iterates over all methods of the current class (including overridden
  // methods), not only the methods declared by this class. This
  // approximation makes the code simpler, and doesn't change the semantics
  // because classes declaring methods overridden by the current class are
  // linked (and have performed their own pre-loading) before the linking
  // of the current class.
  // This is also the moment where a potential mismatch between the
  // ValueTypes attribute and the kind of the class effectively loaded is detected.


  // Note:
  // Value class types used for flattenable fields are loaded during
  // the loading phase (see ClassFileParser::layout_fields()).
  // Value class types used as element types for array creation
  // are not pre-loaded. Their loading is triggered by either anewarray
  // or multianewarray bytecodes.

  {
    ResourceMark rm(THREAD);
    for (int i = 0; i < methods()->length(); i++) {
      Method* m = methods()->at(i);
      for (SignatureStream ss(m->signature()); !ss.is_done(); ss.next()) {
        Symbol* sig = ss.as_symbol(THREAD);
        if (is_declared_value_type(sig)) {
          // Get current loader and protection domain first.
          oop loader = class_loader();
          oop protection_domain = this->protection_domain();

          Klass* klass = SystemDictionary::resolve_or_fail(sig,
              Handle(THREAD, loader), Handle(THREAD, protection_domain), true,
              CHECK_false);
          if (klass == NULL) {
            THROW_(vmSymbols::java_lang_LinkageError(), false);
          }
          if (!klass->is_value()) {
            THROW_(vmSymbols::java_lang_IncompatibleClassChangeError(), false);
          }
        }
      }
    }
  }

  // in case the class is linked in the process of linking its superclasses
  if (is_linked()) {
    return true;
  }

  // trace only the link time for this klass that includes
  // the verification time
  PerfClassTraceTime vmtimer(ClassLoader::perf_class_link_time(),
                             ClassLoader::perf_class_link_selftime(),
                             ClassLoader::perf_classes_linked(),
                             jt->get_thread_stat()->perf_recursion_counts_addr(),
                             jt->get_thread_stat()->perf_timers_addr(),
                             PerfClassTraceTime::CLASS_LINK);

  // verification & rewriting
  {
    HandleMark hm(THREAD);
    Handle h_init_lock(THREAD, init_lock());
    ObjectLocker ol(h_init_lock, THREAD, h_init_lock() != NULL);
    // rewritten will have been set if loader constraint error found
    // on an earlier link attempt
    // don't verify or rewrite if already rewritten
    //

    if (!is_linked()) {
      if (!is_rewritten()) {
        {
          bool verify_ok = verify_code(throw_verifyerror, THREAD);
          if (!verify_ok) {
            return false;
          }
        }

        // Just in case a side-effect of verify linked this class already
        // (which can sometimes happen since the verifier loads classes
        // using custom class loaders, which are free to initialize things)
        if (is_linked()) {
          return true;
        }

        // also sets rewritten
        rewrite_class(CHECK_false);
      } else if (is_shared()) {
        SystemDictionaryShared::check_verification_constraints(this, CHECK_false);
      }

      // relocate jsrs and link methods after they are all rewritten
      link_methods(CHECK_false);

      // Initialize the vtable and interface table after
      // methods have been rewritten since rewrite may
      // fabricate new Method*s.
      // also does loader constraint checking
      //
      // initialize_vtable and initialize_itable need to be rerun for
      // a shared class if the class is not loaded by the NULL classloader.
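      // (Presumably the boot-loaded shared case can be skipped because those
      // tables were already populated when the CDS archive was dumped; for any
      // other loader they must be rebuilt here.)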
      ClassLoaderData * loader_data = class_loader_data();
      if (!(is_shared() &&
            loader_data->is_the_null_class_loader_data())) {
        ResourceMark rm(THREAD);
        vtable().initialize_vtable(true, CHECK_false);
        itable().initialize_itable(true, CHECK_false);
      }
#ifdef ASSERT
      else {
        vtable().verify(tty, true);
        // In case itable verification is ever added.
        // itable().verify(tty, true);
      }
#endif

      set_init_state(linked);
      if (JvmtiExport::should_post_class_prepare()) {
        Thread *thread = THREAD;
        assert(thread->is_Java_thread(), "thread->is_Java_thread()");
        JvmtiExport::post_class_prepare((JavaThread *) thread, this);
      }
    }
  }
  return true;
}


// Rewrite the byte codes of all of the methods of a class.
// The rewriter must be called exactly once. Rewriting must happen after
// verification but before the first method of the class is executed.
void InstanceKlass::rewrite_class(TRAPS) {
  assert(is_loaded(), "must be loaded");
  if (is_rewritten()) {
    assert(is_shared(), "rewriting an unshared class?");
    return;
  }
  Rewriter::rewrite(this, CHECK);
  set_rewritten();
}

// Now relocate and link method entry points after class is rewritten.
// This is outside the is_rewritten flag. In case of an exception, it can be
// executed more than once.
void InstanceKlass::link_methods(TRAPS) {
  int len = methods()->length();
  for (int i = len-1; i >= 0; i--) {
    methodHandle m(THREAD, methods()->at(i));

    // Set up method entry points for compiler and interpreter.
    m->link_method(m, CHECK);
  }
}

// Eagerly initialize superinterfaces that declare default methods (concrete instance: any access)
void InstanceKlass::initialize_super_interfaces(TRAPS) {
  assert (has_nonstatic_concrete_methods(), "caller should have checked this");
  for (int i = 0; i < local_interfaces()->length(); ++i) {
    Klass* iface = local_interfaces()->at(i);
    InstanceKlass* ik = InstanceKlass::cast(iface);

    // Initialization is depth first search, i.e. we start with top of the inheritance tree
    // has_nonstatic_concrete_methods drives searching superinterfaces since it
    // means has_nonstatic_concrete_methods in its superinterface hierarchy
    if (ik->has_nonstatic_concrete_methods()) {
      ik->initialize_super_interfaces(CHECK);
    }

    // Only initialize() interfaces that "declare" concrete methods.
    if (ik->should_be_initialized() && ik->declares_nonstatic_concrete_methods()) {
      ik->initialize(CHECK);
    }
  }
}

void InstanceKlass::initialize_impl(TRAPS) {
  HandleMark hm(THREAD);

  // Make sure klass is linked (verified) before initialization
  // A class could already be verified, since it has been reflected upon.
  link_class(CHECK);

  DTRACE_CLASSINIT_PROBE(required, -1);

  bool wait = false;

  // refer to the JVM book page 47 for description of steps
  // Step 1
  {
    Handle h_init_lock(THREAD, init_lock());
    ObjectLocker ol(h_init_lock, THREAD, h_init_lock() != NULL);

    Thread *self = THREAD; // the current thread

    // Step 2
    // If we were to use wait() instead of waitUninterruptibly() then
    // we might end up throwing IE from link/symbol resolution sites
    // that aren't expected to throw. This would wreak havoc. See 6320309.
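    // Another thread is in the middle of initializing this class: wait on the
    // init lock until it finishes (or fails), then re-examine the state below.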
    while (is_being_initialized() && !is_reentrant_initialization(self)) {
      wait = true;
      ol.waitUninterruptibly(CHECK);
    }

    // Step 3
    if (is_being_initialized() && is_reentrant_initialization(self)) {
      DTRACE_CLASSINIT_PROBE_WAIT(recursive, -1, wait);
      return;
    }

    // Step 4
    if (is_initialized()) {
      DTRACE_CLASSINIT_PROBE_WAIT(concurrent, -1, wait);
      return;
    }

    // Step 5
    if (is_in_error_state()) {
      DTRACE_CLASSINIT_PROBE_WAIT(erroneous, -1, wait);
      ResourceMark rm(THREAD);
      const char* desc = "Could not initialize class ";
      const char* className = external_name();
      size_t msglen = strlen(desc) + strlen(className) + 1;
      char* message = NEW_RESOURCE_ARRAY(char, msglen);
      if (NULL == message) {
        // Out of memory: can't create detailed error message
        THROW_MSG(vmSymbols::java_lang_NoClassDefFoundError(), className);
      } else {
        jio_snprintf(message, msglen, "%s%s", desc, className);
        THROW_MSG(vmSymbols::java_lang_NoClassDefFoundError(), message);
      }
    }

    // Step 6
    set_init_state(being_initialized);
    set_init_thread(self);
  }

  // Step 7
  // Next, if C is a class rather than an interface, initialize its super class and super
  // interfaces.
  if (!is_interface()) {
    Klass* super_klass = super();
    if (super_klass != NULL && super_klass->should_be_initialized()) {
      super_klass->initialize(THREAD);
    }
    // If C implements any interface that declares a non-static, concrete method,
    // the initialization of C triggers initialization of its super interfaces.
    // Only need to recurse if has_nonstatic_concrete_methods which includes declaring and
    // having a superinterface that declares non-static, concrete methods
    if (!HAS_PENDING_EXCEPTION && has_nonstatic_concrete_methods()) {
      initialize_super_interfaces(THREAD);
    }

    // If any exceptions, complete abruptly, throwing the same exception as above.
    if (HAS_PENDING_EXCEPTION) {
      Handle e(THREAD, PENDING_EXCEPTION);
      CLEAR_PENDING_EXCEPTION;
      {
        EXCEPTION_MARK;
        // Locks object, set state, and notify all waiting threads
        set_initialization_state_and_notify(initialization_error, THREAD);
        CLEAR_PENDING_EXCEPTION;
      }
      DTRACE_CLASSINIT_PROBE_WAIT(super__failed, -1, wait);
      THROW_OOP(e());
    }
  }

  // Step 8
  // Initialize classes of flattenable fields
  {
    for (AllFieldStream fs(this); !fs.done(); fs.next()) {
      if (fs.is_flattenable()) {
        InstanceKlass* field_klass = InstanceKlass::cast(this->get_value_field_klass(fs.index()));
        field_klass->initialize(CHECK);
      }
    }
  }


  // Look for AOT-compiled methods for this klass, including class initializer.
  AOTLoader::load_for_klass(this, THREAD);

  // Step 9
  {
    assert(THREAD->is_Java_thread(), "non-JavaThread in initialize_impl");
    JavaThread* jt = (JavaThread*)THREAD;
    DTRACE_CLASSINIT_PROBE_WAIT(clinit, -1, wait);
    // Timer includes any side effects of class initialization (resolution,
    // etc), but not recursive entry into call_class_initializer().
    PerfClassTraceTime timer(ClassLoader::perf_class_init_time(),
                             ClassLoader::perf_class_init_selftime(),
                             ClassLoader::perf_classes_inited(),
                             jt->get_thread_stat()->perf_recursion_counts_addr(),
                             jt->get_thread_stat()->perf_timers_addr(),
                             PerfClassTraceTime::CLASS_CLINIT);
    call_class_initializer(THREAD);
  }

  // Step 10
  if (!HAS_PENDING_EXCEPTION) {
    set_initialization_state_and_notify(fully_initialized, CHECK);
    {
      debug_only(vtable().verify(tty, true);)
    }
  }
  else {
    // Step 11 and 12
    Handle e(THREAD, PENDING_EXCEPTION);
    CLEAR_PENDING_EXCEPTION;
    // JVMTI has already reported the pending exception
    // JVMTI internal flag reset is needed in order to report ExceptionInInitializerError
    JvmtiExport::clear_detected_exception((JavaThread*)THREAD);
    {
      EXCEPTION_MARK;
      set_initialization_state_and_notify(initialization_error, THREAD);
      CLEAR_PENDING_EXCEPTION;   // ignore any exception thrown, class initialization error is thrown below
      // JVMTI has already reported the pending exception
      // JVMTI internal flag reset is needed in order to report ExceptionInInitializerError
      JvmtiExport::clear_detected_exception((JavaThread*)THREAD);
    }
    DTRACE_CLASSINIT_PROBE_WAIT(error, -1, wait);
    if (e->is_a(SystemDictionary::Error_klass())) {
      THROW_OOP(e());
    } else {
      JavaCallArguments args(e);
      THROW_ARG(vmSymbols::java_lang_ExceptionInInitializerError(),
                vmSymbols::throwable_void_signature(),
                &args);
    }
  }
  DTRACE_CLASSINIT_PROBE_WAIT(end, -1, wait);
}


void InstanceKlass::set_initialization_state_and_notify(ClassState state, TRAPS) {
  Handle h_init_lock(THREAD, init_lock());
  if (h_init_lock() != NULL) {
    ObjectLocker ol(h_init_lock, THREAD);
    set_init_state(state);
    fence_and_clear_init_lock();
    ol.notify_all(CHECK);
  } else {
    assert(h_init_lock() != NULL, "The initialization state should never be set twice");
    set_init_state(state);
  }
}

// The embedded _implementor field can only record one implementor.
// When there is more than one implementor, the _implementor field
// is set to the interface Klass* itself. Following are the possible
// values for the _implementor field:
//   NULL                  - no implementor
//   implementor Klass*    - one implementor
//   self                  - more than one implementor
//
// The _implementor field only exists for interfaces.
void InstanceKlass::add_implementor(Klass* k) {
  assert(Compile_lock->owned_by_self(), "");
  assert(is_interface(), "not interface");
  // Filter out my subinterfaces.
  // (Note: Interfaces are never on the subklass list.)
  if (InstanceKlass::cast(k)->is_interface()) return;

  // Filter out subclasses whose supers already implement me.
  // (Note: CHA must walk subclasses of direct implementors
  // in order to locate indirect implementors.)
  Klass* sk = k->super();
  if (sk != NULL && InstanceKlass::cast(sk)->implements_interface(this))
    // We only need to check one immediate superclass, since the
    // implements_interface query looks at transitive_interfaces.
    // Any supers of the super have the same (or fewer) transitive_interfaces.
    return;

  Klass* ik = implementor();
  if (ik == NULL) {
    set_implementor(k);
  } else if (ik != this) {
    // There is already an implementor. Use the interface itself as an indicator of
    // more than one implementor.
    set_implementor(this);
  }

  // The implementor also implements the transitive_interfaces
  for (int index = 0; index < local_interfaces()->length(); index++) {
    InstanceKlass::cast(local_interfaces()->at(index))->add_implementor(k);
  }
}

void InstanceKlass::init_implementor() {
  if (is_interface()) {
    set_implementor(NULL);
  }
}


void InstanceKlass::process_interfaces(Thread *thread) {
  // link this class into the implementors list of every interface it implements
  for (int i = local_interfaces()->length() - 1; i >= 0; i--) {
    assert(local_interfaces()->at(i)->is_klass(), "must be a klass");
    InstanceKlass* interf = InstanceKlass::cast(local_interfaces()->at(i));
    assert(interf->is_interface(), "expected interface");
    interf->add_implementor(this);
  }
}

bool InstanceKlass::can_be_primary_super_slow() const {
  if (is_interface())
    return false;
  else
    return Klass::can_be_primary_super_slow();
}

GrowableArray<Klass*>* InstanceKlass::compute_secondary_supers(int num_extra_slots) {
  // The secondaries are the implemented interfaces.
  Array<Klass*>* interfaces = transitive_interfaces();
  int num_secondaries = num_extra_slots + interfaces->length();
  if (num_secondaries == 0) {
    // Must share this for correct bootstrapping!
    set_secondary_supers(Universe::the_empty_klass_array());
    return NULL;
  } else if (num_extra_slots == 0) {
    // The secondary super list is exactly the same as the transitive interfaces.
    // Redefine classes has to be careful not to delete this!
    set_secondary_supers(interfaces);
    return NULL;
  } else {
    // Copy transitive interfaces to a temporary growable array to be constructed
    // into the secondary super list with extra slots.
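    // (The returned growable array is presumably consumed by the caller,
    // Klass::initialize_supers, which appends the extra entries and installs
    // the final secondary_supers array.)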
    GrowableArray<Klass*>* secondaries = new GrowableArray<Klass*>(interfaces->length());
    for (int i = 0; i < interfaces->length(); i++) {
      secondaries->push(interfaces->at(i));
    }
    return secondaries;
  }
}

bool InstanceKlass::compute_is_subtype_of(Klass* k) {
  if (k->is_interface()) {
    return implements_interface(k);
  } else {
    return Klass::compute_is_subtype_of(k);
  }
}

bool InstanceKlass::implements_interface(Klass* k) const {
  if (this == k) return true;
  assert(k->is_interface(), "should be an interface class");
  for (int i = 0; i < transitive_interfaces()->length(); i++) {
    if (transitive_interfaces()->at(i) == k) {
      return true;
    }
  }
  return false;
}

bool InstanceKlass::is_same_or_direct_interface(Klass *k) const {
  // Verify direct super interface
  if (this == k) return true;
  assert(k->is_interface(), "should be an interface class");
  for (int i = 0; i < local_interfaces()->length(); i++) {
    if (local_interfaces()->at(i) == k) {
      return true;
    }
  }
  return false;
}

objArrayOop InstanceKlass::allocate_objArray(int n, int length, TRAPS) {
  if (length < 0) THROW_0(vmSymbols::java_lang_NegativeArraySizeException());
  if (length > arrayOopDesc::max_array_length(T_OBJECT)) {
    report_java_out_of_memory("Requested array size exceeds VM limit");
    JvmtiExport::post_array_size_exhausted();
    THROW_OOP_0(Universe::out_of_memory_error_array_size());
  }
  int size = objArrayOopDesc::object_size(length);
  Klass* ak = array_klass(n, CHECK_NULL);
  objArrayOop o =
    (objArrayOop)CollectedHeap::array_allocate(ak, size, length, CHECK_NULL);
  return o;
}

instanceOop InstanceKlass::register_finalizer(instanceOop i, TRAPS) {
  if (TraceFinalizerRegistration) {
    tty->print("Registered ");
    i->print_value_on(tty);
    tty->print_cr(" (" INTPTR_FORMAT ") as finalizable", p2i(i));
  }
  instanceHandle h_i(THREAD, i);
  // Pass the handle as argument, JavaCalls::call expects oop as jobjects
  JavaValue result(T_VOID);
  JavaCallArguments args(h_i);
  methodHandle mh(THREAD, Universe::finalizer_register_method());
  JavaCalls::call(&result, mh, &args, CHECK_NULL);
  return h_i();
}

instanceOop InstanceKlass::allocate_instance(TRAPS) {
  bool has_finalizer_flag = has_finalizer(); // Query before possible GC
  int size = size_helper();  // Query before forming handle.

  instanceOop i;

  i = (instanceOop)CollectedHeap::obj_allocate(this, size, CHECK_NULL);
  if (has_finalizer_flag && !RegisterFinalizersAtInit) {
    i = register_finalizer(i, CHECK_NULL);
  }
  return i;
}

void InstanceKlass::check_valid_for_instantiation(bool throwError, TRAPS) {
  if (is_interface() || is_abstract()) {
    ResourceMark rm(THREAD);
    THROW_MSG(throwError ? vmSymbols::java_lang_InstantiationError()
              : vmSymbols::java_lang_InstantiationException(), external_name());
  }
  if (this == SystemDictionary::Class_klass()) {
    ResourceMark rm(THREAD);
    THROW_MSG(throwError ? vmSymbols::java_lang_IllegalAccessError()
              : vmSymbols::java_lang_IllegalAccessException(), external_name());
  }
}

Klass* InstanceKlass::array_klass_impl(bool or_null, int n, TRAPS) {
  // Need load-acquire for lock-free read
  if (array_klasses_acquire() == NULL) {
    if (or_null) return NULL;

    ResourceMark rm;
    JavaThread *jt = (JavaThread *)THREAD;
    {
      // Atomic creation of array_klasses
      MutexLocker mc(Compile_lock, THREAD);   // for vtables
      MutexLocker ma(MultiArray_lock, THREAD);

      // Check if update has already taken place
      if (array_klasses() == NULL) {
        Klass* k = ObjArrayKlass::allocate_objArray_klass(class_loader_data(), 1, this, CHECK_NULL);
        // use 'release' to pair with lock-free load
        release_set_array_klasses(k);
      }
    }
  }
  // array_klasses() will always be set at this point
  ObjArrayKlass* oak = (ObjArrayKlass*)array_klasses();
  if (or_null) {
    return oak->array_klass_or_null(n);
  }
  return oak->array_klass(n, THREAD);
}

Klass* InstanceKlass::array_klass_impl(bool or_null, TRAPS) {
  return array_klass_impl(or_null, 1, THREAD);
}

static int call_class_initializer_counter = 0;   // for debugging

Method* InstanceKlass::class_initializer() const {
  Method* clinit = find_method(
      vmSymbols::class_initializer_name(), vmSymbols::void_method_signature());
  if (clinit != NULL && clinit->has_valid_initializer_flags()) {
    return clinit;
  }
  return NULL;
}

void InstanceKlass::call_class_initializer(TRAPS) {
  if (ReplayCompiles &&
      (ReplaySuppressInitializers == 1 ||
       (ReplaySuppressInitializers >= 2 && class_loader() != NULL))) {
    // Hide the existence of the initializer for the purpose of replaying the compile
    return;
  }

  methodHandle h_method(THREAD, class_initializer());
  assert(!is_initialized(), "we cannot initialize twice");
  LogTarget(Info, class, init) lt;
  if (lt.is_enabled()) {
    ResourceMark rm;
    LogStream ls(lt);
    ls.print("%d Initializing ", call_class_initializer_counter++);
    name()->print_value_on(&ls);
    ls.print_cr("%s (" INTPTR_FORMAT ")", h_method() == NULL ? "(no method)" : "", p2i(this));
  }
  if (h_method() != NULL) {
    JavaCallArguments args; // No arguments
    JavaValue result(T_VOID);
    JavaCalls::call(&result, h_method, &args, CHECK); // Static call (no args)
  }
}


void InstanceKlass::mask_for(const methodHandle& method, int bci,
                             InterpreterOopMap* entry_for) {
  // Lazily create the _oop_map_cache at first request
  // Lock-free access requires load_acquire.
  OopMapCache* oop_map_cache = OrderAccess::load_acquire(&_oop_map_cache);
  if (oop_map_cache == NULL) {
    MutexLockerEx x(OopMapCacheAlloc_lock, Mutex::_no_safepoint_check_flag);
    // Check if _oop_map_cache was allocated while we were waiting for this lock
    if ((oop_map_cache = _oop_map_cache) == NULL) {
      oop_map_cache = new OopMapCache();
      // Ensure _oop_map_cache is stable, since it is examined without a lock
      OrderAccess::release_store(&_oop_map_cache, oop_map_cache);
    }
  }
  // _oop_map_cache is constant after init; lookup below does its own locking.
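  // (A miss for (method, bci) is presumably handled inside the lookup itself,
  // which computes the oop map and caches it for later requests.)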
  oop_map_cache->lookup(method, bci, entry_for);
}


bool InstanceKlass::find_local_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
  for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
    Symbol* f_name = fs.name();
    Symbol* f_sig  = fs.signature();
    if (f_name == name && f_sig == sig) {
      fd->reinitialize(const_cast<InstanceKlass*>(this), fs.index());
      return true;
    }
  }
  return false;
}


Klass* InstanceKlass::find_interface_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
  const int n = local_interfaces()->length();
  for (int i = 0; i < n; i++) {
    Klass* intf1 = local_interfaces()->at(i);
    assert(intf1->is_interface(), "just checking type");
    // search for field in current interface
    if (InstanceKlass::cast(intf1)->find_local_field(name, sig, fd)) {
      assert(fd->is_static(), "interface field must be static");
      return intf1;
    }
    // search for field in direct superinterfaces
    Klass* intf2 = InstanceKlass::cast(intf1)->find_interface_field(name, sig, fd);
    if (intf2 != NULL) return intf2;
  }
  // otherwise field lookup fails
  return NULL;
}


Klass* InstanceKlass::find_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
  // search order according to newest JVM spec (5.4.3.2, p.167).
  // 1) search for field in current klass
  if (find_local_field(name, sig, fd)) {
    return const_cast<InstanceKlass*>(this);
  }
  // 2) search for field recursively in direct superinterfaces
  { Klass* intf = find_interface_field(name, sig, fd);
    if (intf != NULL) return intf;
  }
  // 3) apply field lookup recursively if superclass exists
  { Klass* supr = super();
    if (supr != NULL) return InstanceKlass::cast(supr)->find_field(name, sig, fd);
  }
  // 4) otherwise field lookup fails
  return NULL;
}


Klass* InstanceKlass::find_field(Symbol* name, Symbol* sig, bool is_static, fieldDescriptor* fd) const {
  // search order according to newest JVM spec (5.4.3.2, p.167).
  // 1) search for field in current klass
  if (find_local_field(name, sig, fd)) {
    if (fd->is_static() == is_static) return const_cast<InstanceKlass*>(this);
  }
  // 2) search for field recursively in direct superinterfaces
  if (is_static) {
    Klass* intf = find_interface_field(name, sig, fd);
    if (intf != NULL) return intf;
  }
  // 3) apply field lookup recursively if superclass exists
  { Klass* supr = super();
    if (supr != NULL) return InstanceKlass::cast(supr)->find_field(name, sig, is_static, fd);
  }
  // 4) otherwise field lookup fails
  return NULL;
}


bool InstanceKlass::find_local_field_from_offset(int offset, bool is_static, fieldDescriptor* fd) const {
  for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
    if (fs.offset() == offset) {
      fd->reinitialize(const_cast<InstanceKlass*>(this), fs.index());
      if (fd->is_static() == is_static) return true;
    }
  }
  return false;
}


bool InstanceKlass::find_field_from_offset(int offset, bool is_static, fieldDescriptor* fd) const {
  Klass* klass = const_cast<InstanceKlass*>(this);
  while (klass != NULL) {
    if (InstanceKlass::cast(klass)->find_local_field_from_offset(offset, is_static, fd)) {
      return true;
    }
    klass = klass->super();
  }
  return false;
}


void InstanceKlass::methods_do(void f(Method* method)) {
  // Methods aren't stable until they are loaded. This can be read outside
  // a lock through the ClassLoaderData for profiling
  if (!is_loaded()) {
    return;
  }

  int len = methods()->length();
  for (int index = 0; index < len; index++) {
    Method* m = methods()->at(index);
    assert(m->is_method(), "must be method");
    f(m);
  }
}


void InstanceKlass::do_local_static_fields(FieldClosure* cl) {
  for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
    if (fs.access_flags().is_static()) {
      fieldDescriptor& fd = fs.field_descriptor();
      cl->do_field(&fd);
    }
  }
}


void InstanceKlass::do_local_static_fields(void f(fieldDescriptor*, Handle, TRAPS), Handle mirror, TRAPS) {
  for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
    if (fs.access_flags().is_static()) {
      fieldDescriptor& fd = fs.field_descriptor();
      f(&fd, mirror, CHECK);
    }
  }
}


static int compare_fields_by_offset(int* a, int* b) {
  return a[0] - b[0];
}

void InstanceKlass::do_nonstatic_fields(FieldClosure* cl) {
  InstanceKlass* super = superklass();
  if (super != NULL) {
    super->do_nonstatic_fields(cl);
  }
  fieldDescriptor fd;
  int length = java_fields_count();
  // In DebugInfo nonstatic fields are sorted by offset.
  int* fields_sorted = NEW_C_HEAP_ARRAY(int, 2*(length+1), mtClass);
  int j = 0;
  for (int i = 0; i < length; i += 1) {
    fd.reinitialize(this, i);
    if (!fd.is_static()) {
      fields_sorted[j + 0] = fd.offset();
      fields_sorted[j + 1] = i;
      j += 2;
    }
  }
  if (j > 0) {
    length = j;
    // _sort_Fn is defined in growableArray.hpp.
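    // fields_sorted holds (offset, field index) pairs flattened into an int
    // array; sorting on the first int of each pair lets the loop below visit
    // the fields in layout order.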
    qsort(fields_sorted, length/2, 2*sizeof(int), (_sort_Fn)compare_fields_by_offset);
    for (int i = 0; i < length; i += 2) {
      fd.reinitialize(this, fields_sorted[i + 1]);
      assert(!fd.is_static() && fd.offset() == fields_sorted[i], "only nonstatic fields");
      cl->do_field(&fd);
    }
  }
  FREE_C_HEAP_ARRAY(int, fields_sorted);
}


void InstanceKlass::array_klasses_do(void f(Klass* k, TRAPS), TRAPS) {
  if (array_klasses() != NULL)
    ArrayKlass::cast(array_klasses())->array_klasses_do(f, THREAD);
}

void InstanceKlass::array_klasses_do(void f(Klass* k)) {
  if (array_klasses() != NULL)
    ArrayKlass::cast(array_klasses())->array_klasses_do(f);
}

#ifdef ASSERT
static int linear_search(const Array<Method*>* methods,
                         const Symbol* name,
                         const Symbol* signature) {
  const int len = methods->length();
  for (int index = 0; index < len; index++) {
    const Method* const m = methods->at(index);
    assert(m->is_method(), "must be method");
    if (m->signature() == signature && m->name() == name) {
      return index;
    }
  }
  return -1;
}
#endif

static int binary_search(const Array<Method*>* methods, const Symbol* name) {
  int len = methods->length();
  // methods are sorted, so do binary search
  int l = 0;
  int h = len - 1;
  while (l <= h) {
    int mid = (l + h) >> 1;
    Method* m = methods->at(mid);
    assert(m->is_method(), "must be method");
    int res = m->name()->fast_compare(name);
    if (res == 0) {
      return mid;
    } else if (res < 0) {
      l = mid + 1;
    } else {
      h = mid - 1;
    }
  }
  return -1;
}

// find_method looks up the name/signature in the local methods array
Method* InstanceKlass::find_method(const Symbol* name,
                                   const Symbol* signature) const {
  return find_method_impl(name, signature, find_overpass, find_static, find_private);
}

Method* InstanceKlass::find_method_impl(const Symbol* name,
                                        const Symbol* signature,
                                        OverpassLookupMode overpass_mode,
                                        StaticLookupMode static_mode,
                                        PrivateLookupMode private_mode) const {
  return InstanceKlass::find_method_impl(methods(),
                                         name,
                                         signature,
                                         overpass_mode,
                                         static_mode,
                                         private_mode);
}

// find_instance_method looks up the name/signature in the local methods array
// and skips over static methods
Method* InstanceKlass::find_instance_method(const Array<Method*>* methods,
                                            const Symbol* name,
                                            const Symbol* signature) {
  Method* const meth = InstanceKlass::find_method_impl(methods,
                                                       name,
                                                       signature,
                                                       find_overpass,
                                                       skip_static,
                                                       find_private);
  assert(((meth == NULL) || !meth->is_static()),
         "find_instance_method should have skipped statics");
  return meth;
}

// find_instance_method looks up the name/signature in the local methods array
// and skips over static methods
Method* InstanceKlass::find_instance_method(const Symbol* name, const Symbol* signature) const {
  return InstanceKlass::find_instance_method(methods(), name, signature);
}

// Find looks up the name/signature in the local methods array
// and filters on the overpass, static and private flags
// This returns the first one found
// note that the local methods array can have up to one overpass, one static
// and one instance (private or not) with the same name/signature
Method* InstanceKlass::find_local_method(const Symbol* name,
                                         const Symbol* signature,
                                         OverpassLookupMode overpass_mode,
                                         StaticLookupMode static_mode,
                                         PrivateLookupMode private_mode) const {
  return InstanceKlass::find_method_impl(methods(),
                                         name,
                                         signature,
                                         overpass_mode,
                                         static_mode,
                                         private_mode);
}

// Find looks up the name/signature in the local methods array
// and filters on the overpass, static and private flags
// This returns the first one found
// note that the local methods array can have up to one overpass, one static
// and one instance (private or not) with the same name/signature
Method* InstanceKlass::find_local_method(const Array<Method*>* methods,
                                         const Symbol* name,
                                         const Symbol* signature,
                                         OverpassLookupMode overpass_mode,
                                         StaticLookupMode static_mode,
                                         PrivateLookupMode private_mode) {
  return InstanceKlass::find_method_impl(methods,
                                         name,
                                         signature,
                                         overpass_mode,
                                         static_mode,
                                         private_mode);
}

Method* InstanceKlass::find_method(const Array<Method*>* methods,
                                   const Symbol* name,
                                   const Symbol* signature) {
  return InstanceKlass::find_method_impl(methods,
                                         name,
                                         signature,
                                         find_overpass,
                                         find_static,
                                         find_private);
}

Method* InstanceKlass::find_method_impl(const Array<Method*>* methods,
                                        const Symbol* name,
                                        const Symbol* signature,
                                        OverpassLookupMode overpass_mode,
                                        StaticLookupMode static_mode,
                                        PrivateLookupMode private_mode) {
  int hit = find_method_index(methods, name, signature, overpass_mode, static_mode, private_mode);
  return hit >= 0 ? methods->at(hit): NULL;
}

// true if method matches signature and conforms to skipping_X conditions.
static bool method_matches(const Method* m,
                           const Symbol* signature,
                           bool skipping_overpass,
                           bool skipping_static,
                           bool skipping_private) {
  return ((m->signature() == signature) &&
          (!skipping_overpass || !m->is_overpass()) &&
          (!skipping_static || !m->is_static()) &&
          (!skipping_private || !m->is_private()));
}

// Used directly for default_methods to find the index into the
// default_vtable_indices, and indirectly by find_method
// find_method_index looks in the local methods array to return the index
// of the matching name/signature. If overpass methods are being ignored,
// the search continues to find a potential non-overpass match. This capability
// is important during method resolution to prefer a static method, for example,
// over an overpass method.
// There is the possibility in any class's methods array to have the same name/signature
// for a static method, an overpass method and a local instance method
// To correctly catch a given method, the search criteria may need
// to explicitly skip the other two. For local instance methods, it
For local instance methods, it 1563 // is often necessary to skip private methods 1564 int InstanceKlass::find_method_index(const Array<Method*>* methods, 1565 const Symbol* name, 1566 const Symbol* signature, 1567 OverpassLookupMode overpass_mode, 1568 StaticLookupMode static_mode, 1569 PrivateLookupMode private_mode) { 1570 const bool skipping_overpass = (overpass_mode == skip_overpass); 1571 const bool skipping_static = (static_mode == skip_static); 1572 const bool skipping_private = (private_mode == skip_private); 1573 const int hit = binary_search(methods, name); 1574 if (hit != -1) { 1575 const Method* const m = methods->at(hit); 1576 1577 // Do linear search to find matching signature. First, quick check 1578 // for common case, ignoring overpasses if requested. 1579 if (method_matches(m, signature, skipping_overpass, skipping_static, skipping_private)) { 1580 return hit; 1581 } 1582 1583 // search downwards through overloaded methods 1584 int i; 1585 for (i = hit - 1; i >= 0; --i) { 1586 const Method* const m = methods->at(i); 1587 assert(m->is_method(), "must be method"); 1588 if (m->name() != name) { 1589 break; 1590 } 1591 if (method_matches(m, signature, skipping_overpass, skipping_static, skipping_private)) { 1592 return i; 1593 } 1594 } 1595 // search upwards 1596 for (i = hit + 1; i < methods->length(); ++i) { 1597 const Method* const m = methods->at(i); 1598 assert(m->is_method(), "must be method"); 1599 if (m->name() != name) { 1600 break; 1601 } 1602 if (method_matches(m, signature, skipping_overpass, skipping_static, skipping_private)) { 1603 return i; 1604 } 1605 } 1606 // not found 1607 #ifdef ASSERT 1608 const int index = (skipping_overpass || skipping_static || skipping_private) ? -1 : 1609 linear_search(methods, name, signature); 1610 assert(-1 == index, "binary search should have found entry %d", index); 1611 #endif 1612 } 1613 return -1; 1614 } 1615 1616 int InstanceKlass::find_method_by_name(const Symbol* name, int* end) const { 1617 return find_method_by_name(methods(), name, end); 1618 } 1619 1620 int InstanceKlass::find_method_by_name(const Array<Method*>* methods, 1621 const Symbol* name, 1622 int* end_ptr) { 1623 assert(end_ptr != NULL, "just checking"); 1624 int start = binary_search(methods, name); 1625 int end = start + 1; 1626 if (start != -1) { 1627 while (start - 1 >= 0 && (methods->at(start - 1))->name() == name) --start; 1628 while (end < methods->length() && (methods->at(end))->name() == name) ++end; 1629 *end_ptr = end; 1630 return start; 1631 } 1632 return -1; 1633 } 1634 1635 // uncached_lookup_method searches both the local class methods array and all 1636 // superclasses methods arrays, skipping any overpass methods in superclasses. 
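// For example (sketch; 'sub', 'name' and 'sig' are hypothetical):
//
//   Method* m = sub->uncached_lookup_method(name, sig, Klass::find_overpass);
//
// may return an overpass declared by 'sub' itself, but never an overpass
// inherited from a superclass: the walk below switches to skip_overpass as
// soon as it moves past the starting class.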
1637 Method* InstanceKlass::uncached_lookup_method(const Symbol* name, 1638 const Symbol* signature, 1639 OverpassLookupMode overpass_mode) const { 1640 OverpassLookupMode overpass_local_mode = overpass_mode; 1641 const Klass* klass = this; 1642 while (klass != NULL) { 1643 Method* const method = InstanceKlass::cast(klass)->find_method_impl(name, 1644 signature, 1645 overpass_local_mode, 1646 find_static, 1647 find_private); 1648 if (method != NULL) { 1649 return method; 1650 } 1651 klass = klass->super(); 1652 overpass_local_mode = skip_overpass; // Always ignore overpass methods in superclasses 1653 } 1654 return NULL; 1655 } 1656 1657 #ifdef ASSERT 1658 // search through class hierarchy and return true if this class or 1659 // one of the superclasses was redefined 1660 bool InstanceKlass::has_redefined_this_or_super() const { 1661 const Klass* klass = this; 1662 while (klass != NULL) { 1663 if (InstanceKlass::cast(klass)->has_been_redefined()) { 1664 return true; 1665 } 1666 klass = klass->super(); 1667 } 1668 return false; 1669 } 1670 #endif 1671 1672 // lookup a method in the default methods list then in all transitive interfaces 1673 // Do NOT return private or static methods 1674 Method* InstanceKlass::lookup_method_in_ordered_interfaces(Symbol* name, 1675 Symbol* signature) const { 1676 Method* m = NULL; 1677 if (default_methods() != NULL) { 1678 m = find_method(default_methods(), name, signature); 1679 } 1680 // Look up interfaces 1681 if (m == NULL) { 1682 m = lookup_method_in_all_interfaces(name, signature, find_defaults); 1683 } 1684 return m; 1685 } 1686 1687 // lookup a method in all the interfaces that this class implements 1688 // Do NOT return private or static methods, new in JDK8 which are not externally visible 1689 // They should only be found in the initial InterfaceMethodRef 1690 Method* InstanceKlass::lookup_method_in_all_interfaces(Symbol* name, 1691 Symbol* signature, 1692 DefaultsLookupMode defaults_mode) const { 1693 Array<Klass*>* all_ifs = transitive_interfaces(); 1694 int num_ifs = all_ifs->length(); 1695 InstanceKlass *ik = NULL; 1696 for (int i = 0; i < num_ifs; i++) { 1697 ik = InstanceKlass::cast(all_ifs->at(i)); 1698 Method* m = ik->lookup_method(name, signature); 1699 if (m != NULL && m->is_public() && !m->is_static() && 1700 ((defaults_mode != skip_defaults) || !m->is_default_method())) { 1701 return m; 1702 } 1703 } 1704 return NULL; 1705 } 1706 1707 /* jni_id_for_impl for jfieldIds only */ 1708 JNIid* InstanceKlass::jni_id_for_impl(int offset) { 1709 MutexLocker ml(JfieldIdCreation_lock); 1710 // Retry lookup after we got the lock 1711 JNIid* probe = jni_ids() == NULL ? NULL : jni_ids()->find(offset); 1712 if (probe == NULL) { 1713 // Slow case, allocate new static field identifier 1714 probe = new JNIid(this, offset, jni_ids()); 1715 set_jni_ids(probe); 1716 } 1717 return probe; 1718 } 1719 1720 1721 /* jni_id_for for jfieldIds only */ 1722 JNIid* InstanceKlass::jni_id_for(int offset) { 1723 JNIid* probe = jni_ids() == NULL ? 
                                      NULL : jni_ids()->find(offset);
  if (probe == NULL) {
    probe = jni_id_for_impl(offset);
  }
  return probe;
}

// The _inner_classes array normally holds 4-tuples of u2 values
// (inner_class_info, outer_class_info, inner_name, access_flags); if the
// class also has an EnclosingMethod attribute, two extra u2 values
// (class index, method index) are appended at the end of the array.
// The two accessors below read and update those trailing entries.
u2 InstanceKlass::enclosing_method_data(int offset) const {
  const Array<jushort>* const inner_class_list = inner_classes();
  if (inner_class_list == NULL) {
    return 0;
  }
  const int length = inner_class_list->length();
  if (length % inner_class_next_offset == 0) {
    return 0;
  }
  const int index = length - enclosing_method_attribute_size;
  assert(offset < enclosing_method_attribute_size, "invalid offset");
  return inner_class_list->at(index + offset);
}

void InstanceKlass::set_enclosing_method_indices(u2 class_index,
                                                 u2 method_index) {
  Array<jushort>* inner_class_list = inner_classes();
  assert (inner_class_list != NULL, "_inner_classes list is not set up");
  int length = inner_class_list->length();
  if (length % inner_class_next_offset == enclosing_method_attribute_size) {
    int index = length - enclosing_method_attribute_size;
    inner_class_list->at_put(
      index + enclosing_method_class_index_offset, class_index);
    inner_class_list->at_put(
      index + enclosing_method_method_index_offset, method_index);
  }
}

// Lookup or create a jmethodID.
// This code is called by the VMThread and by JavaThreads, so the
// locking has to be done very carefully to avoid deadlocks
// and/or other cache consistency problems.
//
jmethodID InstanceKlass::get_jmethod_id(const methodHandle& method_h) {
  size_t idnum = (size_t)method_h->method_idnum();
  jmethodID* jmeths = methods_jmethod_ids_acquire();
  size_t length = 0;
  jmethodID id = NULL;

  // We use a double-check locking idiom here because this cache is
  // performance sensitive. In the normal system, this cache only
  // transitions from NULL to non-NULL, which is safe because we use
  // release_set_methods_jmethod_ids() to advertise the new cache.
  // A partially constructed cache should never be seen by a racing
  // thread. We also use release_store() to save a new jmethodID
  // in the cache, so a partially constructed jmethodID should never be
  // seen either. Cache reads of existing jmethodIDs proceed without a
  // lock, but cache writes of a new jmethodID require uniqueness and
  // creation of the cache itself must not leak, so a lock is
  // generally acquired in those two cases.
  //
  // If the RedefineClasses() API has been used, then this cache can
  // grow and we'll have transitions from non-NULL to bigger non-NULL.
  // Cache creation must not leak, and we require safety between all
  // cache accesses and freeing of the old cache, so a lock is generally
  // acquired when the RedefineClasses() API has been used.
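  // Layout of the cache manipulated below (and by
  // get_jmethod_id_fetch_or_update() / get_jmethod_id_length_value()):
  //
  //   jmeths[0]         capacity of the cache, stored as a (jmethodID)size
  //   jmeths[idnum + 1] jmethodID for the method whose method_idnum() == idnum,
  //                     or NULL if one has not been created yet
  //
  // so a cache with room for 'size' ids occupies size + 1 elements.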
1786 1787 if (jmeths != NULL) { 1788 // the cache already exists 1789 if (!idnum_can_increment()) { 1790 // the cache can't grow so we can just get the current values 1791 get_jmethod_id_length_value(jmeths, idnum, &length, &id); 1792 } else { 1793 // cache can grow so we have to be more careful 1794 if (Threads::number_of_threads() == 0 || 1795 SafepointSynchronize::is_at_safepoint()) { 1796 // we're single threaded or at a safepoint - no locking needed 1797 get_jmethod_id_length_value(jmeths, idnum, &length, &id); 1798 } else { 1799 MutexLocker ml(JmethodIdCreation_lock); 1800 get_jmethod_id_length_value(jmeths, idnum, &length, &id); 1801 } 1802 } 1803 } 1804 // implied else: 1805 // we need to allocate a cache so default length and id values are good 1806 1807 if (jmeths == NULL || // no cache yet 1808 length <= idnum || // cache is too short 1809 id == NULL) { // cache doesn't contain entry 1810 1811 // This function can be called by the VMThread so we have to do all 1812 // things that might block on a safepoint before grabbing the lock. 1813 // Otherwise, we can deadlock with the VMThread or have a cache 1814 // consistency issue. These vars keep track of what we might have 1815 // to free after the lock is dropped. 1816 jmethodID to_dealloc_id = NULL; 1817 jmethodID* to_dealloc_jmeths = NULL; 1818 1819 // may not allocate new_jmeths or use it if we allocate it 1820 jmethodID* new_jmeths = NULL; 1821 if (length <= idnum) { 1822 // allocate a new cache that might be used 1823 size_t size = MAX2(idnum+1, (size_t)idnum_allocated_count()); 1824 new_jmeths = NEW_C_HEAP_ARRAY(jmethodID, size+1, mtClass); 1825 memset(new_jmeths, 0, (size+1)*sizeof(jmethodID)); 1826 // cache size is stored in element[0], other elements offset by one 1827 new_jmeths[0] = (jmethodID)size; 1828 } 1829 1830 // allocate a new jmethodID that might be used 1831 jmethodID new_id = NULL; 1832 if (method_h->is_old() && !method_h->is_obsolete()) { 1833 // The method passed in is old (but not obsolete), we need to use the current version 1834 Method* current_method = method_with_idnum((int)idnum); 1835 assert(current_method != NULL, "old and but not obsolete, so should exist"); 1836 new_id = Method::make_jmethod_id(class_loader_data(), current_method); 1837 } else { 1838 // It is the current version of the method or an obsolete method, 1839 // use the version passed in 1840 new_id = Method::make_jmethod_id(class_loader_data(), method_h()); 1841 } 1842 1843 if (Threads::number_of_threads() == 0 || 1844 SafepointSynchronize::is_at_safepoint()) { 1845 // we're single threaded or at a safepoint - no locking needed 1846 id = get_jmethod_id_fetch_or_update(idnum, new_id, new_jmeths, 1847 &to_dealloc_id, &to_dealloc_jmeths); 1848 } else { 1849 MutexLocker ml(JmethodIdCreation_lock); 1850 id = get_jmethod_id_fetch_or_update(idnum, new_id, new_jmeths, 1851 &to_dealloc_id, &to_dealloc_jmeths); 1852 } 1853 1854 // The lock has been dropped so we can free resources. 1855 // Free up either the old cache or the new cache if we allocated one. 1856 if (to_dealloc_jmeths != NULL) { 1857 FreeHeap(to_dealloc_jmeths); 1858 } 1859 // free up the new ID since it wasn't needed 1860 if (to_dealloc_id != NULL) { 1861 Method::destroy_jmethod_id(class_loader_data(), to_dealloc_id); 1862 } 1863 } 1864 return id; 1865 } 1866 1867 // Figure out how many jmethodIDs haven't been allocated, and make 1868 // sure space for them is pre-allocated. 
This makes getting all 1869 // method ids much, much faster with classes with more than 8 1870 // methods, and has a *substantial* effect on performance with jvmti 1871 // code that loads all jmethodIDs for all classes. 1872 void InstanceKlass::ensure_space_for_methodids(int start_offset) { 1873 int new_jmeths = 0; 1874 int length = methods()->length(); 1875 for (int index = start_offset; index < length; index++) { 1876 Method* m = methods()->at(index); 1877 jmethodID id = m->find_jmethod_id_or_null(); 1878 if (id == NULL) { 1879 new_jmeths++; 1880 } 1881 } 1882 if (new_jmeths != 0) { 1883 Method::ensure_jmethod_ids(class_loader_data(), new_jmeths); 1884 } 1885 } 1886 1887 // Common code to fetch the jmethodID from the cache or update the 1888 // cache with the new jmethodID. This function should never do anything 1889 // that causes the caller to go to a safepoint or we can deadlock with 1890 // the VMThread or have cache consistency issues. 1891 // 1892 jmethodID InstanceKlass::get_jmethod_id_fetch_or_update( 1893 size_t idnum, jmethodID new_id, 1894 jmethodID* new_jmeths, jmethodID* to_dealloc_id_p, 1895 jmethodID** to_dealloc_jmeths_p) { 1896 assert(new_id != NULL, "sanity check"); 1897 assert(to_dealloc_id_p != NULL, "sanity check"); 1898 assert(to_dealloc_jmeths_p != NULL, "sanity check"); 1899 assert(Threads::number_of_threads() == 0 || 1900 SafepointSynchronize::is_at_safepoint() || 1901 JmethodIdCreation_lock->owned_by_self(), "sanity check"); 1902 1903 // reacquire the cache - we are locked, single threaded or at a safepoint 1904 jmethodID* jmeths = methods_jmethod_ids_acquire(); 1905 jmethodID id = NULL; 1906 size_t length = 0; 1907 1908 if (jmeths == NULL || // no cache yet 1909 (length = (size_t)jmeths[0]) <= idnum) { // cache is too short 1910 if (jmeths != NULL) { 1911 // copy any existing entries from the old cache 1912 for (size_t index = 0; index < length; index++) { 1913 new_jmeths[index+1] = jmeths[index+1]; 1914 } 1915 *to_dealloc_jmeths_p = jmeths; // save old cache for later delete 1916 } 1917 release_set_methods_jmethod_ids(jmeths = new_jmeths); 1918 } else { 1919 // fetch jmethodID (if any) from the existing cache 1920 id = jmeths[idnum+1]; 1921 *to_dealloc_jmeths_p = new_jmeths; // save new cache for later delete 1922 } 1923 if (id == NULL) { 1924 // No matching jmethodID in the existing cache or we have a new 1925 // cache or we just grew the cache. This cache write is done here 1926 // by the first thread to win the foot race because a jmethodID 1927 // needs to be unique once it is generally available. 1928 id = new_id; 1929 1930 // The jmethodID cache can be read while unlocked so we have to 1931 // make sure the new jmethodID is complete before installing it 1932 // in the cache. 1933 OrderAccess::release_store(&jmeths[idnum+1], id); 1934 } else { 1935 *to_dealloc_id_p = new_id; // save new id for later delete 1936 } 1937 return id; 1938 } 1939 1940 1941 // Common code to get the jmethodID cache length and the jmethodID 1942 // value at index idnum if there is one. 
1943 // 1944 void InstanceKlass::get_jmethod_id_length_value(jmethodID* cache, 1945 size_t idnum, size_t *length_p, jmethodID* id_p) { 1946 assert(cache != NULL, "sanity check"); 1947 assert(length_p != NULL, "sanity check"); 1948 assert(id_p != NULL, "sanity check"); 1949 1950 // cache size is stored in element[0], other elements offset by one 1951 *length_p = (size_t)cache[0]; 1952 if (*length_p <= idnum) { // cache is too short 1953 *id_p = NULL; 1954 } else { 1955 *id_p = cache[idnum+1]; // fetch jmethodID (if any) 1956 } 1957 } 1958 1959 1960 // Lookup a jmethodID, NULL if not found. Do no blocking, no allocations, no handles 1961 jmethodID InstanceKlass::jmethod_id_or_null(Method* method) { 1962 size_t idnum = (size_t)method->method_idnum(); 1963 jmethodID* jmeths = methods_jmethod_ids_acquire(); 1964 size_t length; // length assigned as debugging crumb 1965 jmethodID id = NULL; 1966 if (jmeths != NULL && // If there is a cache 1967 (length = (size_t)jmeths[0]) > idnum) { // and if it is long enough, 1968 id = jmeths[idnum+1]; // Look up the id (may be NULL) 1969 } 1970 return id; 1971 } 1972 1973 inline DependencyContext InstanceKlass::dependencies() { 1974 DependencyContext dep_context(&_dep_context); 1975 return dep_context; 1976 } 1977 1978 int InstanceKlass::mark_dependent_nmethods(KlassDepChange& changes) { 1979 return dependencies().mark_dependent_nmethods(changes); 1980 } 1981 1982 void InstanceKlass::add_dependent_nmethod(nmethod* nm) { 1983 dependencies().add_dependent_nmethod(nm); 1984 } 1985 1986 void InstanceKlass::remove_dependent_nmethod(nmethod* nm, bool delete_immediately) { 1987 dependencies().remove_dependent_nmethod(nm, delete_immediately); 1988 } 1989 1990 #ifndef PRODUCT 1991 void InstanceKlass::print_dependent_nmethods(bool verbose) { 1992 dependencies().print_dependent_nmethods(verbose); 1993 } 1994 1995 bool InstanceKlass::is_dependent_nmethod(nmethod* nm) { 1996 return dependencies().is_dependent_nmethod(nm); 1997 } 1998 #endif //PRODUCT 1999 2000 void InstanceKlass::clean_weak_instanceklass_links(BoolObjectClosure* is_alive) { 2001 clean_implementors_list(is_alive); 2002 clean_method_data(is_alive); 2003 2004 // Since GC iterates InstanceKlasses sequentially, it is safe to remove stale entries here. 
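  // (A "stale" entry is typically one left behind by an nmethod that has since
  //  been invalidated or flushed; see also the discussion of dependency
  //  cleanup in release_C_heap_structures() below.)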
2005 DependencyContext dep_context(&_dep_context); 2006 dep_context.expunge_stale_entries(); 2007 } 2008 2009 void InstanceKlass::clean_implementors_list(BoolObjectClosure* is_alive) { 2010 assert(class_loader_data()->is_alive(is_alive), "this klass should be live"); 2011 if (is_interface()) { 2012 if (ClassUnloading) { 2013 Klass* impl = implementor(); 2014 if (impl != NULL) { 2015 if (!impl->is_loader_alive(is_alive)) { 2016 // remove this guy 2017 Klass** klass = adr_implementor(); 2018 assert(klass != NULL, "null klass"); 2019 if (klass != NULL) { 2020 *klass = NULL; 2021 } 2022 } 2023 } 2024 } 2025 } 2026 } 2027 2028 void InstanceKlass::clean_method_data(BoolObjectClosure* is_alive) { 2029 for (int m = 0; m < methods()->length(); m++) { 2030 MethodData* mdo = methods()->at(m)->method_data(); 2031 if (mdo != NULL) { 2032 mdo->clean_method_data(is_alive); 2033 } 2034 } 2035 } 2036 2037 bool InstanceKlass::supers_have_passed_fingerprint_checks() { 2038 if (java_super() != NULL && !java_super()->has_passed_fingerprint_check()) { 2039 ResourceMark rm; 2040 log_trace(class, fingerprint)("%s : super %s not fingerprinted", external_name(), java_super()->external_name()); 2041 return false; 2042 } 2043 2044 Array<Klass*>* local_interfaces = this->local_interfaces(); 2045 if (local_interfaces != NULL) { 2046 int length = local_interfaces->length(); 2047 for (int i = 0; i < length; i++) { 2048 InstanceKlass* intf = InstanceKlass::cast(local_interfaces->at(i)); 2049 if (!intf->has_passed_fingerprint_check()) { 2050 ResourceMark rm; 2051 log_trace(class, fingerprint)("%s : interface %s not fingerprinted", external_name(), intf->external_name()); 2052 return false; 2053 } 2054 } 2055 } 2056 2057 return true; 2058 } 2059 2060 bool InstanceKlass::should_store_fingerprint(bool is_anonymous) { 2061 #if INCLUDE_AOT 2062 // We store the fingerprint into the InstanceKlass only in the following 2 cases: 2063 if (CalculateClassFingerprint) { 2064 // (1) We are running AOT to generate a shared library. 2065 return true; 2066 } 2067 if (DumpSharedSpaces) { 2068 // (2) We are running -Xshare:dump to create a shared archive 2069 return true; 2070 } 2071 if (UseAOT && is_anonymous) { 2072 // (3) We are using AOT code from a shared library and see an anonymous class 2073 return true; 2074 } 2075 #endif 2076 2077 // In all other cases we might set the _misc_has_passed_fingerprint_check bit, 2078 // but do not store the 64-bit fingerprint to save space. 
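  // (When a fingerprint is stored, has_stored_fingerprint() reports true and
  //  get_stored_fingerprint()/store_fingerprint() below access the 64-bit
  //  value at adr_fingerprint(), which may not be 64-bit aligned.)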
2079 return false; 2080 } 2081 2082 bool InstanceKlass::has_stored_fingerprint() const { 2083 #if INCLUDE_AOT 2084 return should_store_fingerprint() || is_shared(); 2085 #else 2086 return false; 2087 #endif 2088 } 2089 2090 uint64_t InstanceKlass::get_stored_fingerprint() const { 2091 address adr = adr_fingerprint(); 2092 if (adr != NULL) { 2093 return (uint64_t)Bytes::get_native_u8(adr); // adr may not be 64-bit aligned 2094 } 2095 return 0; 2096 } 2097 2098 void InstanceKlass::store_fingerprint(uint64_t fingerprint) { 2099 address adr = adr_fingerprint(); 2100 if (adr != NULL) { 2101 Bytes::put_native_u8(adr, (u8)fingerprint); // adr may not be 64-bit aligned 2102 2103 ResourceMark rm; 2104 log_trace(class, fingerprint)("stored as " PTR64_FORMAT " for class %s", fingerprint, external_name()); 2105 } 2106 } 2107 2108 void InstanceKlass::metaspace_pointers_do(MetaspaceClosure* it) { 2109 Klass::metaspace_pointers_do(it); 2110 2111 if (log_is_enabled(Trace, cds)) { 2112 ResourceMark rm; 2113 log_trace(cds)("Iter(InstanceKlass): %p (%s)", this, external_name()); 2114 } 2115 2116 it->push(&_annotations); 2117 it->push((Klass**)&_array_klasses); 2118 it->push(&_constants); 2119 it->push(&_inner_classes); 2120 it->push(&_array_name); 2121 #if INCLUDE_JVMTI 2122 it->push(&_previous_versions); 2123 #endif 2124 it->push(&_methods); 2125 it->push(&_default_methods); 2126 it->push(&_local_interfaces); 2127 it->push(&_transitive_interfaces); 2128 it->push(&_method_ordering); 2129 it->push(&_default_vtable_indices); 2130 it->push(&_fields); 2131 2132 if (itable_length() > 0) { 2133 itableOffsetEntry* ioe = (itableOffsetEntry*)start_of_itable(); 2134 int method_table_offset_in_words = ioe->offset()/wordSize; 2135 int nof_interfaces = (method_table_offset_in_words - itable_offset_in_words()) 2136 / itableOffsetEntry::size(); 2137 2138 for (int i = 0; i < nof_interfaces; i ++, ioe ++) { 2139 if (ioe->interface_klass() != NULL) { 2140 it->push(ioe->interface_klass_addr()); 2141 itableMethodEntry* ime = ioe->first_method_entry(this); 2142 int n = klassItable::method_count_for_interface(ioe->interface_klass()); 2143 for (int index = 0; index < n; index ++) { 2144 it->push(ime[index].method_addr()); 2145 } 2146 } 2147 } 2148 } 2149 } 2150 2151 void InstanceKlass::remove_unshareable_info() { 2152 Klass::remove_unshareable_info(); 2153 2154 if (is_in_error_state()) { 2155 // Classes are attempted to link during dumping and may fail, 2156 // but these classes are still in the dictionary and class list in CLD. 2157 // Check in_error state first because in_error is > linked state, so 2158 // is_linked() is true. 2159 // If there's a linking error, there is nothing else to remove. 2160 return; 2161 } 2162 2163 // Unlink the class 2164 if (is_linked()) { 2165 unlink_class(); 2166 } 2167 init_implementor(); 2168 2169 constants()->remove_unshareable_info(); 2170 2171 for (int i = 0; i < methods()->length(); i++) { 2172 Method* m = methods()->at(i); 2173 m->remove_unshareable_info(); 2174 } 2175 2176 // do array classes also. 2177 if (array_klasses() != NULL) { 2178 array_klasses()->remove_unshareable_info(); 2179 } 2180 2181 // These are not allocated from metaspace, but they should should all be empty 2182 // during dump time, so we don't need to worry about them in InstanceKlass::iterate(). 
  guarantee(_source_debug_extension == NULL, "must be");
  guarantee(_dep_context == DependencyContext::EMPTY, "must be");
  guarantee(_osr_nmethods_head == NULL, "must be");

#if INCLUDE_JVMTI
  guarantee(_breakpoints == NULL, "must be");
  guarantee(_previous_versions == NULL, "must be");
#endif

  _init_thread = NULL;
  _methods_jmethod_ids = NULL;
  _jni_ids = NULL;
  _oop_map_cache = NULL;
}

void InstanceKlass::remove_java_mirror() {
  Klass::remove_java_mirror();

  // do array classes also.
  if (array_klasses() != NULL) {
    array_klasses()->remove_java_mirror();
  }
}

void InstanceKlass::restore_unshareable_info(ClassLoaderData* loader_data, Handle protection_domain, TRAPS) {
  set_package(loader_data, CHECK);
  Klass::restore_unshareable_info(loader_data, protection_domain, CHECK);

  Array<Method*>* methods = this->methods();
  int num_methods = methods->length();
  for (int index2 = 0; index2 < num_methods; ++index2) {
    methodHandle m(THREAD, methods->at(index2));
    m->restore_unshareable_info(CHECK);
  }
  if (JvmtiExport::has_redefined_a_class()) {
    // Reinitialize the vtable because RedefineClasses may have changed some
    // superclass entries in this vtable, so the CDS vtable might point to
    // old or obsolete entries. RedefineClasses doesn't fix up vtables in
    // the shared system dictionary, only the main one.
    // RedefineClasses also modifies the itable, so reinitialize that as well.
    ResourceMark rm(THREAD);
    vtable().initialize_vtable(false, CHECK);
    itable().initialize_itable(false, CHECK);
  }

  // restore constant pool resolved references
  constants()->restore_unshareable_info(CHECK);

  if (array_klasses() != NULL) {
    // Array classes have null protection domain.
    // --> see ArrayKlass::complete_create_array_klass()
    array_klasses()->restore_unshareable_info(ClassLoaderData::the_null_class_loader_data(), Handle(), CHECK);
  }
}

// Returns true iff is_in_error_state() has been changed as a result of this call.
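// For example (illustrative): if this class's super, or any interface in its
// transitive interface list, is already in the error state, the class is
// moved to the error state as well and the transition is reported by
// returning true; a class that was already bad (or is still good) yields false.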
2239 bool InstanceKlass::check_sharing_error_state() { 2240 assert(DumpSharedSpaces, "should only be called during dumping"); 2241 bool old_state = is_in_error_state(); 2242 2243 if (!is_in_error_state()) { 2244 bool bad = false; 2245 for (InstanceKlass* sup = java_super(); sup; sup = sup->java_super()) { 2246 if (sup->is_in_error_state()) { 2247 bad = true; 2248 break; 2249 } 2250 } 2251 if (!bad) { 2252 Array<Klass*>* interfaces = transitive_interfaces(); 2253 for (int i = 0; i < interfaces->length(); i++) { 2254 Klass* iface = interfaces->at(i); 2255 if (InstanceKlass::cast(iface)->is_in_error_state()) { 2256 bad = true; 2257 break; 2258 } 2259 } 2260 } 2261 2262 if (bad) { 2263 set_in_error_state(); 2264 } 2265 } 2266 2267 return (old_state != is_in_error_state()); 2268 } 2269 2270 #if INCLUDE_JVMTI 2271 static void clear_all_breakpoints(Method* m) { 2272 m->clear_all_breakpoints(); 2273 } 2274 #endif 2275 2276 void InstanceKlass::notify_unload_class(InstanceKlass* ik) { 2277 // notify the debugger 2278 if (JvmtiExport::should_post_class_unload()) { 2279 JvmtiExport::post_class_unload(ik); 2280 } 2281 2282 // notify ClassLoadingService of class unload 2283 ClassLoadingService::notify_class_unloaded(ik); 2284 } 2285 2286 void InstanceKlass::release_C_heap_structures(InstanceKlass* ik) { 2287 // Clean up C heap 2288 ik->release_C_heap_structures(); 2289 ik->constants()->release_C_heap_structures(); 2290 } 2291 2292 void InstanceKlass::release_C_heap_structures() { 2293 // Can't release the constant pool here because the constant pool can be 2294 // deallocated separately from the InstanceKlass for default methods and 2295 // redefine classes. 2296 2297 // Deallocate oop map cache 2298 if (_oop_map_cache != NULL) { 2299 delete _oop_map_cache; 2300 _oop_map_cache = NULL; 2301 } 2302 2303 // Deallocate JNI identifiers for jfieldIDs 2304 JNIid::deallocate(jni_ids()); 2305 set_jni_ids(NULL); 2306 2307 jmethodID* jmeths = methods_jmethod_ids_acquire(); 2308 if (jmeths != (jmethodID*)NULL) { 2309 release_set_methods_jmethod_ids(NULL); 2310 FreeHeap(jmeths); 2311 } 2312 2313 // Release dependencies. 2314 // It is desirable to use DC::remove_all_dependents() here, but, unfortunately, 2315 // it is not safe (see JDK-8143408). The problem is that the klass dependency 2316 // context can contain live dependencies, since there's a race between nmethod & 2317 // klass unloading. If the klass is dead when nmethod unloading happens, relevant 2318 // dependencies aren't removed from the context associated with the class (see 2319 // nmethod::flush_dependencies). It ends up during klass unloading as seemingly 2320 // live dependencies pointing to unloaded nmethods and causes a crash in 2321 // DC::remove_all_dependents() when it touches unloaded nmethod. 2322 dependencies().wipe(); 2323 2324 #if INCLUDE_JVMTI 2325 // Deallocate breakpoint records 2326 if (breakpoints() != 0x0) { 2327 methods_do(clear_all_breakpoints); 2328 assert(breakpoints() == 0x0, "should have cleared breakpoints"); 2329 } 2330 2331 // deallocate the cached class file 2332 if (_cached_class_file != NULL && !MetaspaceShared::is_in_shared_metaspace(_cached_class_file)) { 2333 os::free(_cached_class_file); 2334 _cached_class_file = NULL; 2335 } 2336 #endif 2337 2338 // Decrement symbol reference counts associated with the unloaded class. 2339 if (_name != NULL) _name->decrement_refcount(); 2340 // unreference array name derived from this class name (arrays of an unloaded 2341 // class can't be referenced anymore). 
2342 if (_array_name != NULL) _array_name->decrement_refcount(); 2343 if (_value_types != NULL) { 2344 for (int i = 0; i < _value_types->length(); i++) { 2345 Symbol* s = _value_types->at(i)._class_name; 2346 if (s != NULL) { 2347 s->decrement_refcount(); 2348 } 2349 } 2350 } 2351 if (_source_debug_extension != NULL) FREE_C_HEAP_ARRAY(char, _source_debug_extension); 2352 } 2353 2354 void InstanceKlass::set_source_debug_extension(const char* array, int length) { 2355 if (array == NULL) { 2356 _source_debug_extension = NULL; 2357 } else { 2358 // Adding one to the attribute length in order to store a null terminator 2359 // character could cause an overflow because the attribute length is 2360 // already coded with an u4 in the classfile, but in practice, it's 2361 // unlikely to happen. 2362 assert((length+1) > length, "Overflow checking"); 2363 char* sde = NEW_C_HEAP_ARRAY(char, (length + 1), mtClass); 2364 for (int i = 0; i < length; i++) { 2365 sde[i] = array[i]; 2366 } 2367 sde[length] = '\0'; 2368 _source_debug_extension = sde; 2369 } 2370 } 2371 2372 address InstanceKlass::static_field_addr(int offset) { 2373 assert(offset >= InstanceMirrorKlass::offset_of_static_fields(), "has already been adjusted"); 2374 return (address)(offset + cast_from_oop<intptr_t>(java_mirror())); 2375 } 2376 2377 2378 const char* InstanceKlass::signature_name() const { 2379 int hash_len = 0; 2380 char hash_buf[40]; 2381 2382 // If this is an anonymous class, append a hash to make the name unique 2383 if (is_anonymous()) { 2384 intptr_t hash = (java_mirror() != NULL) ? java_mirror()->identity_hash() : 0; 2385 jio_snprintf(hash_buf, sizeof(hash_buf), "/" UINTX_FORMAT, (uintx)hash); 2386 hash_len = (int)strlen(hash_buf); 2387 } 2388 2389 // Get the internal name as a c string 2390 const char* src = (const char*) (name()->as_C_string()); 2391 const int src_length = (int)strlen(src); 2392 2393 char* dest = NEW_RESOURCE_ARRAY(char, src_length + hash_len + 3); 2394 2395 // Add L as type indicator 2396 int dest_index = 0; 2397 dest[dest_index++] = 'L'; 2398 2399 // Add the actual class name 2400 for (int src_index = 0; src_index < src_length; ) { 2401 dest[dest_index++] = src[src_index++]; 2402 } 2403 2404 // If we have a hash, append it 2405 for (int hash_index = 0; hash_index < hash_len; ) { 2406 dest[dest_index++] = hash_buf[hash_index++]; 2407 } 2408 2409 // Add the semicolon and the NULL 2410 dest[dest_index++] = ';'; 2411 dest[dest_index] = '\0'; 2412 return dest; 2413 } 2414 2415 // Used to obtain the package name from a fully qualified class name. 
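// For example (illustrative):
//   "java/lang/String"           returns the Symbol "java/lang"
//   "Foo" (no '/' in the name)   returns NULL
//   NULL or a zero-length name   returns NULL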
2416 Symbol* InstanceKlass::package_from_name(const Symbol* name, TRAPS) { 2417 if (name == NULL) { 2418 return NULL; 2419 } else { 2420 if (name->utf8_length() <= 0) { 2421 return NULL; 2422 } 2423 ResourceMark rm; 2424 const char* package_name = ClassLoader::package_from_name((const char*) name->as_C_string()); 2425 if (package_name == NULL) { 2426 return NULL; 2427 } 2428 Symbol* pkg_name = SymbolTable::new_symbol(package_name, THREAD); 2429 return pkg_name; 2430 } 2431 } 2432 2433 ModuleEntry* InstanceKlass::module() const { 2434 if (!in_unnamed_package()) { 2435 return _package_entry->module(); 2436 } 2437 const Klass* host = host_klass(); 2438 if (host == NULL) { 2439 return class_loader_data()->unnamed_module(); 2440 } 2441 return host->class_loader_data()->unnamed_module(); 2442 } 2443 2444 void InstanceKlass::set_package(ClassLoaderData* loader_data, TRAPS) { 2445 2446 // ensure java/ packages only loaded by boot or platform builtin loaders 2447 Handle class_loader(THREAD, loader_data->class_loader()); 2448 check_prohibited_package(name(), class_loader, CHECK); 2449 2450 TempNewSymbol pkg_name = package_from_name(name(), CHECK); 2451 2452 if (pkg_name != NULL && loader_data != NULL) { 2453 2454 // Find in class loader's package entry table. 2455 _package_entry = loader_data->packages()->lookup_only(pkg_name); 2456 2457 // If the package name is not found in the loader's package 2458 // entry table, it is an indication that the package has not 2459 // been defined. Consider it defined within the unnamed module. 2460 if (_package_entry == NULL) { 2461 ResourceMark rm; 2462 2463 if (!ModuleEntryTable::javabase_defined()) { 2464 // Before java.base is defined during bootstrapping, define all packages in 2465 // the java.base module. If a non-java.base package is erroneously placed 2466 // in the java.base module it will be caught later when java.base 2467 // is defined by ModuleEntryTable::verify_javabase_packages check. 2468 assert(ModuleEntryTable::javabase_moduleEntry() != NULL, JAVA_BASE_NAME " module is NULL"); 2469 _package_entry = loader_data->packages()->lookup(pkg_name, ModuleEntryTable::javabase_moduleEntry()); 2470 } else { 2471 assert(loader_data->unnamed_module() != NULL, "unnamed module is NULL"); 2472 _package_entry = loader_data->packages()->lookup(pkg_name, 2473 loader_data->unnamed_module()); 2474 } 2475 2476 // A package should have been successfully created 2477 assert(_package_entry != NULL, "Package entry for class %s not found, loader %s", 2478 name()->as_C_string(), loader_data->loader_name()); 2479 } 2480 2481 if (log_is_enabled(Debug, module)) { 2482 ResourceMark rm; 2483 ModuleEntry* m = _package_entry->module(); 2484 log_trace(module)("Setting package: class: %s, package: %s, loader: %s, module: %s", 2485 external_name(), 2486 pkg_name->as_C_string(), 2487 loader_data->loader_name(), 2488 (m->is_named() ? m->name()->as_C_string() : UNNAMED_MODULE)); 2489 } 2490 } else { 2491 ResourceMark rm; 2492 log_trace(module)("Setting package: class: %s, package: unnamed, loader: %s, module: %s", 2493 external_name(), 2494 (loader_data != NULL) ? 
loader_data->loader_name() : "NULL", 2495 UNNAMED_MODULE); 2496 } 2497 } 2498 2499 2500 // different versions of is_same_class_package 2501 2502 bool InstanceKlass::is_same_class_package(const Klass* class2) const { 2503 oop classloader1 = this->class_loader(); 2504 PackageEntry* classpkg1 = this->package(); 2505 if (class2->is_objArray_klass()) { 2506 class2 = ObjArrayKlass::cast(class2)->bottom_klass(); 2507 } 2508 2509 oop classloader2; 2510 PackageEntry* classpkg2; 2511 if (class2->is_instance_klass()) { 2512 classloader2 = class2->class_loader(); 2513 classpkg2 = class2->package(); 2514 } else { 2515 assert(class2->is_typeArray_klass(), "should be type array"); 2516 classloader2 = NULL; 2517 classpkg2 = NULL; 2518 } 2519 2520 // Same package is determined by comparing class loader 2521 // and package entries. Both must be the same. This rule 2522 // applies even to classes that are defined in the unnamed 2523 // package, they still must have the same class loader. 2524 if ((classloader1 == classloader2) && (classpkg1 == classpkg2)) { 2525 return true; 2526 } 2527 2528 return false; 2529 } 2530 2531 // return true if this class and other_class are in the same package. Classloader 2532 // and classname information is enough to determine a class's package 2533 bool InstanceKlass::is_same_class_package(oop other_class_loader, 2534 const Symbol* other_class_name) const { 2535 if (class_loader() != other_class_loader) { 2536 return false; 2537 } 2538 if (name()->fast_compare(other_class_name) == 0) { 2539 return true; 2540 } 2541 2542 { 2543 ResourceMark rm; 2544 2545 bool bad_class_name = false; 2546 const char* other_pkg = 2547 ClassLoader::package_from_name((const char*) other_class_name->as_C_string(), &bad_class_name); 2548 if (bad_class_name) { 2549 return false; 2550 } 2551 // Check that package_from_name() returns NULL, not "", if there is no package. 2552 assert(other_pkg == NULL || strlen(other_pkg) > 0, "package name is empty string"); 2553 2554 const Symbol* const this_package_name = 2555 this->package() != NULL ? this->package()->name() : NULL; 2556 2557 if (this_package_name == NULL || other_pkg == NULL) { 2558 // One of the two doesn't have a package. Only return true if the other 2559 // one also doesn't have a package. 2560 return (const char*)this_package_name == other_pkg; 2561 } 2562 2563 // Check if package is identical 2564 return this_package_name->equals(other_pkg); 2565 } 2566 } 2567 2568 // Returns true iff super_method can be overridden by a method in targetclassname 2569 // See JLS 3rd edition 8.4.6.1 2570 // Assumes name-signature match 2571 // "this" is InstanceKlass of super_method which must exist 2572 // note that the InstanceKlass of the method in the targetclassname has not always been created yet 2573 bool InstanceKlass::is_override(const methodHandle& super_method, Handle targetclassloader, Symbol* targetclassname, TRAPS) { 2574 // Private methods can not be overridden 2575 if (super_method->is_private()) { 2576 return false; 2577 } 2578 // If super method is accessible, then override 2579 if ((super_method->is_protected()) || 2580 (super_method->is_public())) { 2581 return true; 2582 } 2583 // Package-private methods are not inherited outside of package 2584 assert(super_method->is_package_private(), "must be package private"); 2585 return(is_same_class_package(targetclassloader(), targetclassname)); 2586 } 2587 2588 // Only boot and platform class loaders can define classes in "java/" packages. 
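// For example (illustrative): an application class loader that tries to
// define "java/foo/Bar" gets a java.lang.SecurityException whose message has
// the form
//   Class loader (instance of): <loader name> tried to load prohibited package name: java.foo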
2589 void InstanceKlass::check_prohibited_package(Symbol* class_name, 2590 Handle class_loader, 2591 TRAPS) { 2592 if (!class_loader.is_null() && 2593 !SystemDictionary::is_platform_class_loader(class_loader()) && 2594 class_name != NULL) { 2595 ResourceMark rm(THREAD); 2596 char* name = class_name->as_C_string(); 2597 if (strncmp(name, JAVAPKG, JAVAPKG_LEN) == 0 && name[JAVAPKG_LEN] == '/') { 2598 TempNewSymbol pkg_name = InstanceKlass::package_from_name(class_name, CHECK); 2599 assert(pkg_name != NULL, "Error in parsing package name starting with 'java/'"); 2600 name = pkg_name->as_C_string(); 2601 const char* class_loader_name = SystemDictionary::loader_name(class_loader()); 2602 StringUtils::replace_no_expand(name, "/", "."); 2603 const char* msg_text1 = "Class loader (instance of): "; 2604 const char* msg_text2 = " tried to load prohibited package name: "; 2605 size_t len = strlen(msg_text1) + strlen(class_loader_name) + strlen(msg_text2) + strlen(name) + 1; 2606 char* message = NEW_RESOURCE_ARRAY_IN_THREAD(THREAD, char, len); 2607 jio_snprintf(message, len, "%s%s%s%s", msg_text1, class_loader_name, msg_text2, name); 2608 THROW_MSG(vmSymbols::java_lang_SecurityException(), message); 2609 } 2610 } 2611 return; 2612 } 2613 2614 // tell if two classes have the same enclosing class (at package level) 2615 bool InstanceKlass::is_same_package_member(const Klass* class2, TRAPS) const { 2616 if (class2 == this) return true; 2617 if (!class2->is_instance_klass()) return false; 2618 2619 // must be in same package before we try anything else 2620 if (!is_same_class_package(class2)) 2621 return false; 2622 2623 // As long as there is an outer_this.getEnclosingClass, 2624 // shift the search outward. 2625 const InstanceKlass* outer_this = this; 2626 for (;;) { 2627 // As we walk along, look for equalities between outer_this and class2. 2628 // Eventually, the walks will terminate as outer_this stops 2629 // at the top-level class around the original class. 2630 bool ignore_inner_is_member; 2631 const Klass* next = outer_this->compute_enclosing_class(&ignore_inner_is_member, 2632 CHECK_false); 2633 if (next == NULL) break; 2634 if (next == class2) return true; 2635 outer_this = InstanceKlass::cast(next); 2636 } 2637 2638 // Now do the same for class2. 2639 const InstanceKlass* outer2 = InstanceKlass::cast(class2); 2640 for (;;) { 2641 bool ignore_inner_is_member; 2642 Klass* next = outer2->compute_enclosing_class(&ignore_inner_is_member, 2643 CHECK_false); 2644 if (next == NULL) break; 2645 // Might as well check the new outer against all available values. 2646 if (next == this) return true; 2647 if (next == outer_this) return true; 2648 outer2 = InstanceKlass::cast(next); 2649 } 2650 2651 // If by this point we have not found an equality between the 2652 // two classes, we know they are in separate package members. 2653 return false; 2654 } 2655 2656 bool InstanceKlass::find_inner_classes_attr(int* ooff, int* noff, TRAPS) const { 2657 constantPoolHandle i_cp(THREAD, constants()); 2658 for (InnerClassesIterator iter(this); !iter.done(); iter.next()) { 2659 int ioff = iter.inner_class_info_index(); 2660 if (ioff != 0) { 2661 // Check to see if the name matches the class we're looking for 2662 // before attempting to find the class. 
2663 if (i_cp->klass_name_at_matches(this, ioff)) { 2664 Klass* inner_klass = i_cp->klass_at(ioff, CHECK_false); 2665 if (this == inner_klass) { 2666 *ooff = iter.outer_class_info_index(); 2667 *noff = iter.inner_name_index(); 2668 return true; 2669 } 2670 } 2671 } 2672 } 2673 return false; 2674 } 2675 2676 InstanceKlass* InstanceKlass::compute_enclosing_class(bool* inner_is_member, TRAPS) const { 2677 InstanceKlass* outer_klass = NULL; 2678 *inner_is_member = false; 2679 int ooff = 0, noff = 0; 2680 bool has_inner_classes_attr = find_inner_classes_attr(&ooff, &noff, THREAD); 2681 if (has_inner_classes_attr) { 2682 constantPoolHandle i_cp(THREAD, constants()); 2683 if (ooff != 0) { 2684 Klass* ok = i_cp->klass_at(ooff, CHECK_NULL); 2685 outer_klass = InstanceKlass::cast(ok); 2686 *inner_is_member = true; 2687 } 2688 if (NULL == outer_klass) { 2689 // It may be anonymous; try for that. 2690 int encl_method_class_idx = enclosing_method_class_index(); 2691 if (encl_method_class_idx != 0) { 2692 Klass* ok = i_cp->klass_at(encl_method_class_idx, CHECK_NULL); 2693 outer_klass = InstanceKlass::cast(ok); 2694 *inner_is_member = false; 2695 } 2696 } 2697 } 2698 2699 // If no inner class attribute found for this class. 2700 if (NULL == outer_klass) return NULL; 2701 2702 // Throws an exception if outer klass has not declared k as an inner klass 2703 // We need evidence that each klass knows about the other, or else 2704 // the system could allow a spoof of an inner class to gain access rights. 2705 Reflection::check_for_inner_class(outer_klass, this, *inner_is_member, CHECK_NULL); 2706 return outer_klass; 2707 } 2708 2709 jint InstanceKlass::compute_modifier_flags(TRAPS) const { 2710 jint access = access_flags().as_int(); 2711 2712 // But check if it happens to be member class. 2713 InnerClassesIterator iter(this); 2714 for (; !iter.done(); iter.next()) { 2715 int ioff = iter.inner_class_info_index(); 2716 // Inner class attribute can be zero, skip it. 2717 // Strange but true: JVM spec. allows null inner class refs. 2718 if (ioff == 0) continue; 2719 2720 // only look at classes that are already loaded 2721 // since we are looking for the flags for our self. 2722 Symbol* inner_name = constants()->klass_name_at(ioff); 2723 if (name() == inner_name) { 2724 // This is really a member class. 2725 access = iter.inner_access_flags(); 2726 break; 2727 } 2728 } 2729 // Remember to strip ACC_SUPER bit 2730 return (access & (~JVM_ACC_SUPER)) & JVM_ACC_WRITTEN_FLAGS; 2731 } 2732 2733 jint InstanceKlass::jvmti_class_status() const { 2734 jint result = 0; 2735 2736 if (is_linked()) { 2737 result |= JVMTI_CLASS_STATUS_VERIFIED | JVMTI_CLASS_STATUS_PREPARED; 2738 } 2739 2740 if (is_initialized()) { 2741 assert(is_linked(), "Class status is not consistent"); 2742 result |= JVMTI_CLASS_STATUS_INITIALIZED; 2743 } 2744 if (is_in_error_state()) { 2745 result |= JVMTI_CLASS_STATUS_ERROR; 2746 } 2747 return result; 2748 } 2749 2750 Method* InstanceKlass::method_at_itable(Klass* holder, int index, TRAPS) { 2751 itableOffsetEntry* ioe = (itableOffsetEntry*)start_of_itable(); 2752 int method_table_offset_in_words = ioe->offset()/wordSize; 2753 int nof_interfaces = (method_table_offset_in_words - itable_offset_in_words()) 2754 / itableOffsetEntry::size(); 2755 2756 for (int cnt = 0 ; ; cnt ++, ioe ++) { 2757 // If the interface isn't implemented by the receiver class, 2758 // the VM should throw IncompatibleClassChangeError. 
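    // (Layout sketch: the itable begins with one itableOffsetEntry per
    //  implemented interface, each holding the interface Klass* and the offset
    //  of that interface's block of itableMethodEntry slots; 'index' selects a
    //  slot within the block that belongs to 'holder'.)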
2759 if (cnt >= nof_interfaces) { 2760 THROW_NULL(vmSymbols::java_lang_IncompatibleClassChangeError()); 2761 } 2762 2763 Klass* ik = ioe->interface_klass(); 2764 if (ik == holder) break; 2765 } 2766 2767 itableMethodEntry* ime = ioe->first_method_entry(this); 2768 Method* m = ime[index].method(); 2769 if (m == NULL) { 2770 THROW_NULL(vmSymbols::java_lang_AbstractMethodError()); 2771 } 2772 return m; 2773 } 2774 2775 2776 #if INCLUDE_JVMTI 2777 // update default_methods for redefineclasses for methods that are 2778 // not yet in the vtable due to concurrent subclass define and superinterface 2779 // redefinition 2780 // Note: those in the vtable, should have been updated via adjust_method_entries 2781 void InstanceKlass::adjust_default_methods(InstanceKlass* holder, bool* trace_name_printed) { 2782 // search the default_methods for uses of either obsolete or EMCP methods 2783 if (default_methods() != NULL) { 2784 for (int index = 0; index < default_methods()->length(); index ++) { 2785 Method* old_method = default_methods()->at(index); 2786 if (old_method == NULL || old_method->method_holder() != holder || !old_method->is_old()) { 2787 continue; // skip uninteresting entries 2788 } 2789 assert(!old_method->is_deleted(), "default methods may not be deleted"); 2790 2791 Method* new_method = holder->method_with_idnum(old_method->orig_method_idnum()); 2792 2793 assert(new_method != NULL, "method_with_idnum() should not be NULL"); 2794 assert(old_method != new_method, "sanity check"); 2795 2796 default_methods()->at_put(index, new_method); 2797 if (log_is_enabled(Info, redefine, class, update)) { 2798 ResourceMark rm; 2799 if (!(*trace_name_printed)) { 2800 log_info(redefine, class, update) 2801 ("adjust: klassname=%s default methods from name=%s", 2802 external_name(), old_method->method_holder()->external_name()); 2803 *trace_name_printed = true; 2804 } 2805 log_debug(redefine, class, update, vtables) 2806 ("default method update: %s(%s) ", 2807 new_method->name()->as_C_string(), new_method->signature()->as_C_string()); 2808 } 2809 } 2810 } 2811 } 2812 #endif // INCLUDE_JVMTI 2813 2814 // On-stack replacement stuff 2815 void InstanceKlass::add_osr_nmethod(nmethod* n) { 2816 // only one compilation can be active 2817 { 2818 // This is a short non-blocking critical region, so the no safepoint check is ok. 2819 MutexLockerEx ml(OsrList_lock, Mutex::_no_safepoint_check_flag); 2820 assert(n->is_osr_method(), "wrong kind of nmethod"); 2821 n->set_osr_link(osr_nmethods_head()); 2822 set_osr_nmethods_head(n); 2823 // Raise the highest osr level if necessary 2824 if (TieredCompilation) { 2825 Method* m = n->method(); 2826 m->set_highest_osr_comp_level(MAX2(m->highest_osr_comp_level(), n->comp_level())); 2827 } 2828 } 2829 2830 // Get rid of the osr methods for the same bci that have lower levels. 2831 if (TieredCompilation) { 2832 for (int l = CompLevel_limited_profile; l < n->comp_level(); l++) { 2833 nmethod *inv = lookup_osr_nmethod(n->method(), n->osr_entry_bci(), l, true); 2834 if (inv != NULL && inv->is_in_use()) { 2835 inv->make_not_entrant(); 2836 } 2837 } 2838 } 2839 } 2840 2841 // Remove osr nmethod from the list. Return true if found and removed. 2842 bool InstanceKlass::remove_osr_nmethod(nmethod* n) { 2843 // This is a short non-blocking critical region, so the no safepoint check is ok. 
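  // (List structure sketch: the OSR nmethods of this klass form a singly
  //  linked list headed by _osr_nmethods_head and chained through
  //  nmethod::osr_link(); add_osr_nmethod() above pushes at the head, and the
  //  code below unlinks 'n' and, with tiered compilation, recomputes the
  //  method's highest OSR compilation level from the remaining entries.)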
2844 MutexLockerEx ml(OsrList_lock, Mutex::_no_safepoint_check_flag); 2845 assert(n->is_osr_method(), "wrong kind of nmethod"); 2846 nmethod* last = NULL; 2847 nmethod* cur = osr_nmethods_head(); 2848 int max_level = CompLevel_none; // Find the max comp level excluding n 2849 Method* m = n->method(); 2850 // Search for match 2851 bool found = false; 2852 while(cur != NULL && cur != n) { 2853 if (TieredCompilation && m == cur->method()) { 2854 // Find max level before n 2855 max_level = MAX2(max_level, cur->comp_level()); 2856 } 2857 last = cur; 2858 cur = cur->osr_link(); 2859 } 2860 nmethod* next = NULL; 2861 if (cur == n) { 2862 found = true; 2863 next = cur->osr_link(); 2864 if (last == NULL) { 2865 // Remove first element 2866 set_osr_nmethods_head(next); 2867 } else { 2868 last->set_osr_link(next); 2869 } 2870 } 2871 n->set_osr_link(NULL); 2872 if (TieredCompilation) { 2873 cur = next; 2874 while (cur != NULL) { 2875 // Find max level after n 2876 if (m == cur->method()) { 2877 max_level = MAX2(max_level, cur->comp_level()); 2878 } 2879 cur = cur->osr_link(); 2880 } 2881 m->set_highest_osr_comp_level(max_level); 2882 } 2883 return found; 2884 } 2885 2886 int InstanceKlass::mark_osr_nmethods(const Method* m) { 2887 // This is a short non-blocking critical region, so the no safepoint check is ok. 2888 MutexLockerEx ml(OsrList_lock, Mutex::_no_safepoint_check_flag); 2889 nmethod* osr = osr_nmethods_head(); 2890 int found = 0; 2891 while (osr != NULL) { 2892 assert(osr->is_osr_method(), "wrong kind of nmethod found in chain"); 2893 if (osr->method() == m) { 2894 osr->mark_for_deoptimization(); 2895 found++; 2896 } 2897 osr = osr->osr_link(); 2898 } 2899 return found; 2900 } 2901 2902 nmethod* InstanceKlass::lookup_osr_nmethod(const Method* m, int bci, int comp_level, bool match_level) const { 2903 // This is a short non-blocking critical region, so the no safepoint check is ok. 2904 MutexLockerEx ml(OsrList_lock, Mutex::_no_safepoint_check_flag); 2905 nmethod* osr = osr_nmethods_head(); 2906 nmethod* best = NULL; 2907 while (osr != NULL) { 2908 assert(osr->is_osr_method(), "wrong kind of nmethod found in chain"); 2909 // There can be a time when a c1 osr method exists but we are waiting 2910 // for a c2 version. When c2 completes its osr nmethod we will trash 2911 // the c1 version and only be able to find the c2 version. However 2912 // while we overflow in the c1 code at back branches we don't want to 2913 // try and switch to the same code as we are already running 2914 2915 if (osr->method() == m && 2916 (bci == InvocationEntryBci || osr->osr_entry_bci() == bci)) { 2917 if (match_level) { 2918 if (osr->comp_level() == comp_level) { 2919 // Found a match - return it. 2920 return osr; 2921 } 2922 } else { 2923 if (best == NULL || (osr->comp_level() > best->comp_level())) { 2924 if (osr->comp_level() == CompLevel_highest_tier) { 2925 // Found the best possible - return it. 
2926 return osr; 2927 } 2928 best = osr; 2929 } 2930 } 2931 } 2932 osr = osr->osr_link(); 2933 } 2934 if (best != NULL && best->comp_level() >= comp_level && match_level == false) { 2935 return best; 2936 } 2937 return NULL; 2938 } 2939 2940 // ----------------------------------------------------------------------------------------------------- 2941 // Printing 2942 2943 #ifndef PRODUCT 2944 2945 #define BULLET " - " 2946 2947 static const char* state_names[] = { 2948 "allocated", "loaded", "linked", "being_initialized", "fully_initialized", "initialization_error" 2949 }; 2950 2951 static void print_vtable(address self, intptr_t* start, int len, outputStream* st) { 2952 ResourceMark rm; 2953 int* forward_refs = NEW_RESOURCE_ARRAY(int, len); 2954 for (int i = 0; i < len; i++) forward_refs[i] = 0; 2955 for (int i = 0; i < len; i++) { 2956 intptr_t e = start[i]; 2957 st->print("%d : " INTPTR_FORMAT, i, e); 2958 if (forward_refs[i] != 0) { 2959 int from = forward_refs[i]; 2960 int off = (int) start[from]; 2961 st->print(" (offset %d <= [%d])", off, from); 2962 } 2963 if (e != 0 && ((Metadata*)e)->is_metaspace_object()) { 2964 st->print(" "); 2965 ((Metadata*)e)->print_value_on(st); 2966 } else if (self != NULL && e > 0 && e < 0x10000) { 2967 address location = self + e; 2968 int index = (int)((intptr_t*)location - start); 2969 st->print(" (offset %d => [%d])", (int)e, index); 2970 if (index >= 0 && index < len) 2971 forward_refs[index] = i; 2972 } 2973 st->cr(); 2974 } 2975 } 2976 2977 static void print_vtable(vtableEntry* start, int len, outputStream* st) { 2978 return print_vtable(NULL, reinterpret_cast<intptr_t*>(start), len, st); 2979 } 2980 2981 template<typename T> 2982 static void print_array_on(outputStream* st, Array<T>* array) { 2983 if (array == NULL) { st->print_cr("NULL"); return; } 2984 array->print_value_on(st); st->cr(); 2985 if (Verbose || WizardMode) { 2986 for (int i = 0; i < array->length(); i++) { 2987 st->print("%d : ", i); array->at(i)->print_value_on(st); st->cr(); 2988 } 2989 } 2990 } 2991 2992 static void print_array_on(outputStream* st, Array<int>* array) { 2993 if (array == NULL) { st->print_cr("NULL"); return; } 2994 array->print_value_on(st); st->cr(); 2995 if (Verbose || WizardMode) { 2996 for (int i = 0; i < array->length(); i++) { 2997 st->print("%d : %d", i, array->at(i)); st->cr(); 2998 } 2999 } 3000 } 3001 3002 void InstanceKlass::print_on(outputStream* st) const { 3003 assert(is_klass(), "must be klass"); 3004 Klass::print_on(st); 3005 3006 st->print(BULLET"instance size: %d", size_helper()); st->cr(); 3007 st->print(BULLET"klass size: %d", size()); st->cr(); 3008 st->print(BULLET"access: "); access_flags().print_on(st); st->cr(); 3009 st->print(BULLET"misc flags: 0x%x", _misc_flags); st->cr(); 3010 st->print(BULLET"state: "); st->print_cr("%s", state_names[_init_state]); 3011 st->print(BULLET"name: "); name()->print_value_on(st); st->cr(); 3012 st->print(BULLET"super: "); super()->print_value_on_maybe_null(st); st->cr(); 3013 st->print(BULLET"sub: "); 3014 Klass* sub = subklass(); 3015 int n; 3016 for (n = 0; sub != NULL; n++, sub = sub->next_sibling()) { 3017 if (n < MaxSubklassPrintSize) { 3018 sub->print_value_on(st); 3019 st->print(" "); 3020 } 3021 } 3022 if (n >= MaxSubklassPrintSize) st->print("(" INTX_FORMAT " more klasses...)", n - MaxSubklassPrintSize); 3023 st->cr(); 3024 3025 if (is_interface()) { 3026 st->print_cr(BULLET"nof implementors: %d", nof_implementors()); 3027 if (nof_implementors() == 1) { 3028 st->print_cr(BULLET"implementor: "); 
3029 st->print(" "); 3030 implementor()->print_value_on(st); 3031 st->cr(); 3032 } 3033 } 3034 3035 st->print(BULLET"arrays: "); array_klasses()->print_value_on_maybe_null(st); st->cr(); 3036 st->print(BULLET"methods: "); print_array_on(st, methods()); 3037 st->print(BULLET"method ordering: "); print_array_on(st, method_ordering()); 3038 st->print(BULLET"default_methods: "); print_array_on(st, default_methods()); 3039 if (default_vtable_indices() != NULL) { 3040 st->print(BULLET"default vtable indices: "); print_array_on(st, default_vtable_indices()); 3041 } 3042 st->print(BULLET"local interfaces: "); print_array_on(st, local_interfaces()); 3043 st->print(BULLET"trans. interfaces: "); print_array_on(st, transitive_interfaces()); 3044 st->print(BULLET"constants: "); constants()->print_value_on(st); st->cr(); 3045 if (class_loader_data() != NULL) { 3046 st->print(BULLET"class loader data: "); 3047 class_loader_data()->print_value_on(st); 3048 st->cr(); 3049 } 3050 st->print(BULLET"host class: "); host_klass()->print_value_on_maybe_null(st); st->cr(); 3051 if (source_file_name() != NULL) { 3052 st->print(BULLET"source file: "); 3053 source_file_name()->print_value_on(st); 3054 st->cr(); 3055 } 3056 if (source_debug_extension() != NULL) { 3057 st->print(BULLET"source debug extension: "); 3058 st->print("%s", source_debug_extension()); 3059 st->cr(); 3060 } 3061 st->print(BULLET"class annotations: "); class_annotations()->print_value_on(st); st->cr(); 3062 st->print(BULLET"class type annotations: "); class_type_annotations()->print_value_on(st); st->cr(); 3063 st->print(BULLET"field annotations: "); fields_annotations()->print_value_on(st); st->cr(); 3064 st->print(BULLET"field type annotations: "); fields_type_annotations()->print_value_on(st); st->cr(); 3065 { 3066 bool have_pv = false; 3067 // previous versions are linked together through the InstanceKlass 3068 for (InstanceKlass* pv_node = previous_versions(); 3069 pv_node != NULL; 3070 pv_node = pv_node->previous_versions()) { 3071 if (!have_pv) 3072 st->print(BULLET"previous version: "); 3073 have_pv = true; 3074 pv_node->constants()->print_value_on(st); 3075 } 3076 if (have_pv) st->cr(); 3077 } 3078 3079 if (generic_signature() != NULL) { 3080 st->print(BULLET"generic signature: "); 3081 generic_signature()->print_value_on(st); 3082 st->cr(); 3083 } 3084 st->print(BULLET"inner classes: "); inner_classes()->print_value_on(st); st->cr(); 3085 st->print(BULLET"java mirror: "); java_mirror()->print_value_on(st); st->cr(); 3086 st->print(BULLET"vtable length %d (start addr: " INTPTR_FORMAT ")", vtable_length(), p2i(start_of_vtable())); st->cr(); 3087 if (vtable_length() > 0 && (Verbose || WizardMode)) print_vtable(start_of_vtable(), vtable_length(), st); 3088 st->print(BULLET"itable length %d (start addr: " INTPTR_FORMAT ")", itable_length(), p2i(start_of_itable())); st->cr(); 3089 if (itable_length() > 0 && (Verbose || WizardMode)) print_vtable(NULL, start_of_itable(), itable_length(), st); 3090 st->print_cr(BULLET"---- static fields (%d words):", static_field_size()); 3091 FieldPrinter print_static_field(st); 3092 ((InstanceKlass*)this)->do_local_static_fields(&print_static_field); 3093 st->print_cr(BULLET"---- non-static fields (%d words):", nonstatic_field_size()); 3094 FieldPrinter print_nonstatic_field(st); 3095 InstanceKlass* ik = const_cast<InstanceKlass*>(this); 3096 ik->do_nonstatic_fields(&print_nonstatic_field); 3097 3098 st->print(BULLET"non-static oop maps: "); 3099 OopMapBlock* map = start_of_nonstatic_oop_maps(); 3100 
OopMapBlock* end_map = map + nonstatic_oop_map_count(); 3101 while (map < end_map) { 3102 st->print("%d-%d ", map->offset(), map->offset() + heapOopSize*(map->count() - 1)); 3103 map++; 3104 } 3105 st->cr(); 3106 } 3107 3108 #endif //PRODUCT 3109 3110 void InstanceKlass::print_value_on(outputStream* st) const { 3111 assert(is_klass(), "must be klass"); 3112 if (Verbose || WizardMode) access_flags().print_on(st); 3113 name()->print_value_on(st); 3114 } 3115 3116 #ifndef PRODUCT 3117 3118 void FieldPrinter::do_field(fieldDescriptor* fd) { 3119 _st->print(BULLET); 3120 if (_obj == NULL) { 3121 fd->print_on(_st); 3122 _st->cr(); 3123 } else { 3124 fd->print_on_for(_st, _obj); 3125 _st->cr(); 3126 } 3127 } 3128 3129 3130 void InstanceKlass::oop_print_on(oop obj, outputStream* st) { 3131 Klass::oop_print_on(obj, st); 3132 3133 if (this == SystemDictionary::String_klass()) { 3134 typeArrayOop value = java_lang_String::value(obj); 3135 juint length = java_lang_String::length(obj); 3136 if (value != NULL && 3137 value->is_typeArray() && 3138 length <= (juint) value->length()) { 3139 st->print(BULLET"string: "); 3140 java_lang_String::print(obj, st); 3141 st->cr(); 3142 if (!WizardMode) return; // that is enough 3143 } 3144 } 3145 3146 st->print_cr(BULLET"---- fields (total size %d words):", oop_size(obj)); 3147 FieldPrinter print_field(st, obj); 3148 do_nonstatic_fields(&print_field); 3149 3150 if (this == SystemDictionary::Class_klass()) { 3151 st->print(BULLET"signature: "); 3152 java_lang_Class::print_signature(obj, st); 3153 st->cr(); 3154 Klass* mirrored_klass = java_lang_Class::as_Klass(obj); 3155 st->print(BULLET"fake entry for mirror: "); 3156 mirrored_klass->print_value_on_maybe_null(st); 3157 st->cr(); 3158 Klass* array_klass = java_lang_Class::array_klass_acquire(obj); 3159 st->print(BULLET"fake entry for array: "); 3160 array_klass->print_value_on_maybe_null(st); 3161 st->cr(); 3162 st->print_cr(BULLET"fake entry for oop_size: %d", java_lang_Class::oop_size(obj)); 3163 st->print_cr(BULLET"fake entry for static_oop_field_count: %d", java_lang_Class::static_oop_field_count(obj)); 3164 Klass* real_klass = java_lang_Class::as_Klass(obj); 3165 if (real_klass != NULL && real_klass->is_instance_klass()) { 3166 InstanceKlass::cast(real_klass)->do_local_static_fields(&print_field); 3167 } 3168 } else if (this == SystemDictionary::MethodType_klass()) { 3169 st->print(BULLET"signature: "); 3170 java_lang_invoke_MethodType::print_signature(obj, st); 3171 st->cr(); 3172 } 3173 } 3174 3175 #endif //PRODUCT 3176 3177 void InstanceKlass::oop_print_value_on(oop obj, outputStream* st) { 3178 st->print("a "); 3179 name()->print_value_on(st); 3180 obj->print_address_on(st); 3181 if (this == SystemDictionary::String_klass() 3182 && java_lang_String::value(obj) != NULL) { 3183 ResourceMark rm; 3184 int len = java_lang_String::length(obj); 3185 int plen = (len < 24 ? len : 12); 3186 char* str = java_lang_String::as_utf8_string(obj, 0, plen); 3187 st->print(" = \"%s\"", str); 3188 if (len > plen) 3189 st->print("...[%d]", len); 3190 } else if (this == SystemDictionary::Class_klass()) { 3191 Klass* k = java_lang_Class::as_Klass(obj); 3192 st->print(" = "); 3193 if (k != NULL) { 3194 k->print_value_on(st); 3195 } else { 3196 const char* tname = type2name(java_lang_Class::primitive_type(obj)); 3197 st->print("%s", tname ? 
tname : "type?"); 3198 } 3199 } else if (this == SystemDictionary::MethodType_klass()) { 3200 st->print(" = "); 3201 java_lang_invoke_MethodType::print_signature(obj, st); 3202 } else if (java_lang_boxing_object::is_instance(obj)) { 3203 st->print(" = "); 3204 java_lang_boxing_object::print(obj, st); 3205 } else if (this == SystemDictionary::LambdaForm_klass()) { 3206 oop vmentry = java_lang_invoke_LambdaForm::vmentry(obj); 3207 if (vmentry != NULL) { 3208 st->print(" => "); 3209 vmentry->print_value_on(st); 3210 } 3211 } else if (this == SystemDictionary::MemberName_klass()) { 3212 Metadata* vmtarget = java_lang_invoke_MemberName::vmtarget(obj); 3213 if (vmtarget != NULL) { 3214 st->print(" = "); 3215 vmtarget->print_value_on(st); 3216 } else { 3217 java_lang_invoke_MemberName::clazz(obj)->print_value_on(st); 3218 st->print("."); 3219 java_lang_invoke_MemberName::name(obj)->print_value_on(st); 3220 } 3221 } 3222 } 3223 3224 const char* InstanceKlass::internal_name() const { 3225 return external_name(); 3226 } 3227 3228 bool InstanceKlass::is_declared_value_type(int index) { 3229 assert(constants()->is_within_bounds(index) && 3230 constants()->tag_at(index).is_klass_or_reference(), "Invalid index"); 3231 return InstanceKlass::is_declared_value_type(value_types(), index); 3232 } 3233 3234 bool InstanceKlass::is_declared_value_type(Array<ValueTypes>* value_types, int index) { 3235 if (value_types == NULL) return false; // No ValueType attribute in this class file 3236 for(int i = 0; i < value_types->length(); i++) { 3237 if (value_types->at(i)._class_info_index == index) { 3238 return true; 3239 } 3240 } 3241 return false; 3242 } 3243 3244 bool InstanceKlass::is_declared_value_type(Symbol* symbol) { 3245 return InstanceKlass::is_declared_value_type(constants(), value_types(), symbol); 3246 } 3247 3248 bool InstanceKlass::is_declared_value_type(ConstantPool* constants, Array<ValueTypes>* value_types, Symbol* symbol) { 3249 assert(symbol != NULL, "Sanity check"); 3250 if (value_types == NULL) return false; // No ValueType attribute in this class file 3251 for(int i = 0; i < value_types->length(); i++) { 3252 if (value_types->at(i)._class_name == symbol) { 3253 return true; 3254 } 3255 } 3256 // symbol not found, class name symbol might not have been 3257 // updated yet 3258 for(int i = 0; i < value_types->length(); i++) { 3259 if (constants->klass_at_noresolve((int)value_types->at(i)._class_info_index) == symbol) { 3260 value_types->adr_at(i)->_class_name = symbol; 3261 symbol->increment_refcount(); 3262 return true; 3263 } 3264 } 3265 return false; 3266 } 3267 3268 void InstanceKlass::print_class_load_logging(ClassLoaderData* loader_data, 3269 const char* module_name, 3270 const ClassFileStream* cfs) const { 3271 if (!log_is_enabled(Info, class, load)) { 3272 return; 3273 } 3274 3275 ResourceMark rm; 3276 LogMessage(class, load) msg; 3277 stringStream info_stream; 3278 3279 // Name and class hierarchy info 3280 info_stream.print("%s", external_name()); 3281 3282 // Source 3283 if (cfs != NULL) { 3284 if (cfs->source() != NULL) { 3285 if (module_name != NULL) { 3286 if (ClassLoader::is_modules_image(cfs->source())) { 3287 info_stream.print(" source: jrt:/%s", module_name); 3288 } else { 3289 info_stream.print(" source: %s", cfs->source()); 3290 } 3291 } else { 3292 info_stream.print(" source: %s", cfs->source()); 3293 } 3294 } else if (loader_data == ClassLoaderData::the_null_class_loader_data()) { 3295 Thread* THREAD = Thread::current(); 3296 Klass* caller = 3297 THREAD->is_Java_thread() 3298 
? ((JavaThread*)THREAD)->security_get_caller_class(1) 3299 : NULL; 3300 // caller can be NULL, for example, during a JVMTI VM_Init hook 3301 if (caller != NULL) { 3302 info_stream.print(" source: instance of %s", caller->external_name()); 3303 } else { 3304 // source is unknown 3305 } 3306 } else { 3307 oop class_loader = loader_data->class_loader(); 3308 info_stream.print(" source: %s", class_loader->klass()->external_name()); 3309 } 3310 } else { 3311 info_stream.print(" source: shared objects file"); 3312 } 3313 3314 msg.info("%s", info_stream.as_string()); 3315 3316 if (log_is_enabled(Debug, class, load)) { 3317 stringStream debug_stream; 3318 3319 // Class hierarchy info 3320 debug_stream.print(" klass: " INTPTR_FORMAT " super: " INTPTR_FORMAT, 3321 p2i(this), p2i(superklass())); 3322 3323 // Interfaces 3324 if (local_interfaces() != NULL && local_interfaces()->length() > 0) { 3325 debug_stream.print(" interfaces:"); 3326 int length = local_interfaces()->length(); 3327 for (int i = 0; i < length; i++) { 3328 debug_stream.print(" " INTPTR_FORMAT, 3329 p2i(InstanceKlass::cast(local_interfaces()->at(i)))); 3330 } 3331 } 3332 3333 // Class loader 3334 debug_stream.print(" loader: ["); 3335 loader_data->print_value_on(&debug_stream); 3336 debug_stream.print("]"); 3337 3338 // Classfile checksum 3339 if (cfs) { 3340 debug_stream.print(" bytes: %d checksum: %08x", 3341 cfs->length(), 3342 ClassLoader::crc32(0, (const char*)cfs->buffer(), 3343 cfs->length())); 3344 } 3345 3346 msg.debug("%s", debug_stream.as_string()); 3347 } 3348 } 3349 3350 #if INCLUDE_SERVICES 3351 // Size Statistics 3352 void InstanceKlass::collect_statistics(KlassSizeStats *sz) const { 3353 Klass::collect_statistics(sz); 3354 3355 sz->_inst_size = wordSize * size_helper(); 3356 sz->_vtab_bytes = wordSize * vtable_length(); 3357 sz->_itab_bytes = wordSize * itable_length(); 3358 sz->_nonstatic_oopmap_bytes = wordSize * nonstatic_oop_map_size(); 3359 3360 int n = 0; 3361 n += (sz->_methods_array_bytes = sz->count_array(methods())); 3362 n += (sz->_method_ordering_bytes = sz->count_array(method_ordering())); 3363 n += (sz->_local_interfaces_bytes = sz->count_array(local_interfaces())); 3364 n += (sz->_transitive_interfaces_bytes = sz->count_array(transitive_interfaces())); 3365 n += (sz->_fields_bytes = sz->count_array(fields())); 3366 n += (sz->_inner_classes_bytes = sz->count_array(inner_classes())); 3367 sz->_ro_bytes += n; 3368 3369 const ConstantPool* cp = constants(); 3370 if (cp) { 3371 cp->collect_statistics(sz); 3372 } 3373 3374 const Annotations* anno = annotations(); 3375 if (anno) { 3376 anno->collect_statistics(sz); 3377 } 3378 3379 const Array<Method*>* methods_array = methods(); 3380 if (methods()) { 3381 for (int i = 0; i < methods_array->length(); i++) { 3382 Method* method = methods_array->at(i); 3383 if (method) { 3384 sz->_method_count ++; 3385 method->collect_statistics(sz); 3386 } 3387 } 3388 } 3389 } 3390 #endif // INCLUDE_SERVICES 3391 3392 // Verification 3393 3394 class VerifyFieldClosure: public OopClosure { 3395 protected: 3396 template <class T> void do_oop_work(T* p) { 3397 oop obj = oopDesc::load_decode_heap_oop(p); 3398 if (!oopDesc::is_oop_or_null(obj)) { 3399 tty->print_cr("Failed: " PTR_FORMAT " -> " PTR_FORMAT, p2i(p), p2i(obj)); 3400 Universe::print_on(tty); 3401 guarantee(false, "boom"); 3402 } 3403 } 3404 public: 3405 virtual void do_oop(oop* p) { VerifyFieldClosure::do_oop_work(p); } 3406 virtual void do_oop(narrowOop* p) { VerifyFieldClosure::do_oop_work(p); } 3407 }; 3408 3409 void 
InstanceKlass::verify_on(outputStream* st) { 3410 #ifndef PRODUCT 3411 // Avoid redundant verifies, this really should be in product. 3412 if (_verify_count == Universe::verify_count()) return; 3413 _verify_count = Universe::verify_count(); 3414 #endif 3415 3416 // Verify Klass 3417 Klass::verify_on(st); 3418 3419 // Verify that klass is present in ClassLoaderData 3420 guarantee(class_loader_data()->contains_klass(this), 3421 "this class isn't found in class loader data"); 3422 3423 // Verify vtables 3424 if (is_linked()) { 3425 // $$$ This used to be done only for m/s collections. Doing it 3426 // always seemed a valid generalization. (DLD -- 6/00) 3427 vtable().verify(st); 3428 } 3429 3430 // Verify first subklass 3431 if (subklass() != NULL) { 3432 guarantee(subklass()->is_klass(), "should be klass"); 3433 } 3434 3435 // Verify siblings 3436 Klass* super = this->super(); 3437 Klass* sib = next_sibling(); 3438 if (sib != NULL) { 3439 if (sib == this) { 3440 fatal("subclass points to itself " PTR_FORMAT, p2i(sib)); 3441 } 3442 3443 guarantee(sib->is_klass(), "should be klass"); 3444 guarantee(sib->super() == super, "siblings should have same superklass"); 3445 } 3446 3447 // Verify implementor fields 3448 Klass* im = implementor(); 3449 if (im != NULL) { 3450 guarantee(is_interface(), "only interfaces should have implementor set"); 3451 guarantee(im->is_klass(), "should be klass"); 3452 guarantee(!im->is_interface() || im == this, 3453 "implementors cannot be interfaces"); 3454 } 3455 3456 // Verify local interfaces 3457 if (local_interfaces()) { 3458 Array<Klass*>* local_interfaces = this->local_interfaces(); 3459 for (int j = 0; j < local_interfaces->length(); j++) { 3460 Klass* e = local_interfaces->at(j); 3461 guarantee(e->is_klass() && e->is_interface(), "invalid local interface"); 3462 } 3463 } 3464 3465 // Verify transitive interfaces 3466 if (transitive_interfaces() != NULL) { 3467 Array<Klass*>* transitive_interfaces = this->transitive_interfaces(); 3468 for (int j = 0; j < transitive_interfaces->length(); j++) { 3469 Klass* e = transitive_interfaces->at(j); 3470 guarantee(e->is_klass() && e->is_interface(), "invalid transitive interface"); 3471 } 3472 } 3473 3474 // Verify methods 3475 if (methods() != NULL) { 3476 Array<Method*>* methods = this->methods(); 3477 for (int j = 0; j < methods->length(); j++) { 3478 guarantee(methods->at(j)->is_method(), "non-method in methods array"); 3479 } 3480 for (int j = 0; j < methods->length() - 1; j++) { 3481 Method* m1 = methods->at(j); 3482 Method* m2 = methods->at(j + 1); 3483 guarantee(m1->name()->fast_compare(m2->name()) <= 0, "methods not sorted correctly"); 3484 } 3485 } 3486 3487 // Verify method ordering 3488 if (method_ordering() != NULL) { 3489 Array<int>* method_ordering = this->method_ordering(); 3490 int length = method_ordering->length(); 3491 if (JvmtiExport::can_maintain_original_method_order() || 3492 ((UseSharedSpaces || DumpSharedSpaces) && length != 0)) { 3493 guarantee(length == methods()->length(), "invalid method ordering length"); 3494 jlong sum = 0; 3495 for (int j = 0; j < length; j++) { 3496 int original_index = method_ordering->at(j); 3497 guarantee(original_index >= 0, "invalid method ordering index"); 3498 guarantee(original_index < length, "invalid method ordering index"); 3499 sum += original_index; 3500 } 3501 // Verify sum of indices 0,1,...,length-1 3502 guarantee(sum == ((jlong)length*(length-1))/2, "invalid method ordering sum"); 3503 } else { 3504 guarantee(length == 0, "invalid method ordering 
length"); 3505 } 3506 } 3507 3508 // Verify default methods 3509 if (default_methods() != NULL) { 3510 Array<Method*>* methods = this->default_methods(); 3511 for (int j = 0; j < methods->length(); j++) { 3512 guarantee(methods->at(j)->is_method(), "non-method in methods array"); 3513 } 3514 for (int j = 0; j < methods->length() - 1; j++) { 3515 Method* m1 = methods->at(j); 3516 Method* m2 = methods->at(j + 1); 3517 guarantee(m1->name()->fast_compare(m2->name()) <= 0, "methods not sorted correctly"); 3518 } 3519 } 3520 3521 // Verify JNI static field identifiers 3522 if (jni_ids() != NULL) { 3523 jni_ids()->verify(this); 3524 } 3525 3526 // Verify other fields 3527 if (array_klasses() != NULL) { 3528 guarantee(array_klasses()->is_klass(), "should be klass"); 3529 } 3530 if (constants() != NULL) { 3531 guarantee(constants()->is_constantPool(), "should be constant pool"); 3532 } 3533 const Klass* host = host_klass(); 3534 if (host != NULL) { 3535 guarantee(host->is_klass(), "should be klass"); 3536 } 3537 } 3538 3539 void InstanceKlass::oop_verify_on(oop obj, outputStream* st) { 3540 Klass::oop_verify_on(obj, st); 3541 VerifyFieldClosure blk; 3542 obj->oop_iterate_no_header(&blk); 3543 } 3544 3545 3546 // JNIid class for jfieldIDs only 3547 // Note to reviewers: 3548 // These JNI functions are just moved over to column 1 and not changed 3549 // in the compressed oops workspace. 3550 JNIid::JNIid(Klass* holder, int offset, JNIid* next) { 3551 _holder = holder; 3552 _offset = offset; 3553 _next = next; 3554 debug_only(_is_static_field_id = false;) 3555 } 3556 3557 3558 JNIid* JNIid::find(int offset) { 3559 JNIid* current = this; 3560 while (current != NULL) { 3561 if (current->offset() == offset) return current; 3562 current = current->next(); 3563 } 3564 return NULL; 3565 } 3566 3567 void JNIid::deallocate(JNIid* current) { 3568 while (current != NULL) { 3569 JNIid* next = current->next(); 3570 delete current; 3571 current = next; 3572 } 3573 } 3574 3575 3576 void JNIid::verify(Klass* holder) { 3577 int first_field_offset = InstanceMirrorKlass::offset_of_static_fields(); 3578 int end_field_offset; 3579 end_field_offset = first_field_offset + (InstanceKlass::cast(holder)->static_field_size() * wordSize); 3580 3581 JNIid* current = this; 3582 while (current != NULL) { 3583 guarantee(current->holder() == holder, "Invalid klass in JNIid"); 3584 #ifdef ASSERT 3585 int o = current->offset(); 3586 if (current->is_static_field_id()) { 3587 guarantee(o >= first_field_offset && o < end_field_offset, "Invalid static field offset in JNIid"); 3588 } 3589 #endif 3590 current = current->next(); 3591 } 3592 } 3593 3594 oop InstanceKlass::klass_holder_phantom() { 3595 oop* addr; 3596 if (is_anonymous()) { 3597 addr = _java_mirror.ptr_raw(); 3598 } else { 3599 addr = &class_loader_data()->_class_loader; 3600 } 3601 return RootAccess<IN_CONCURRENT_ROOT | ON_PHANTOM_OOP_REF>::oop_load(addr); 3602 } 3603 3604 #ifdef ASSERT 3605 void InstanceKlass::set_init_state(ClassState state) { 3606 bool good_state = is_shared() ? (_init_state <= state) 3607 : (_init_state < state); 3608 assert(good_state || state == allocated, "illegal state transition"); 3609 _init_state = (u1)state; 3610 } 3611 #endif 3612 3613 #if INCLUDE_JVMTI 3614 3615 // RedefineClasses() support for previous versions 3616 3617 // Globally, there is at least one previous version of a class to walk 3618 // during class unloading, which is saved because old methods in the class 3619 // are still running. Otherwise the previous version list is cleaned up. 
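// _has_previous_versions is a VM-global summary flag: it is set when
// add_previous_version() links a scratch class, or when
// purge_previous_version_list() finds a previous version that is still alive,
// and it is read and cleared by has_previous_versions_and_reset() during
// class unloading.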
3620 bool InstanceKlass::_has_previous_versions = false; 3621 3622 // Returns true if there are previous versions of a class for class 3623 // unloading only. Also resets the flag to false. purge_previous_version 3624 // will set the flag to true if there are any left, i.e., if there's any 3625 // work to do for next time. This is to avoid the expensive code cache 3626 // walk in CLDG::do_unloading(). 3627 bool InstanceKlass::has_previous_versions_and_reset() { 3628 bool ret = _has_previous_versions; 3629 log_trace(redefine, class, iklass, purge)("Class unloading: has_previous_versions = %s", 3630 ret ? "true" : "false"); 3631 _has_previous_versions = false; 3632 return ret; 3633 } 3634 3635 // Purge previous versions before adding new previous versions of the class and 3636 // during class unloading. 3637 void InstanceKlass::purge_previous_version_list() { 3638 assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint"); 3639 assert(has_been_redefined(), "Should only be called for main class"); 3640 3641 // Quick exit. 3642 if (previous_versions() == NULL) { 3643 return; 3644 } 3645 3646 // This klass has previous versions so see what we can cleanup 3647 // while it is safe to do so. 3648 3649 int deleted_count = 0; // leave debugging breadcrumbs 3650 int live_count = 0; 3651 ClassLoaderData* loader_data = class_loader_data(); 3652 assert(loader_data != NULL, "should never be null"); 3653 3654 ResourceMark rm; 3655 log_trace(redefine, class, iklass, purge)("%s: previous versions", external_name()); 3656 3657 // previous versions are linked together through the InstanceKlass 3658 InstanceKlass* pv_node = previous_versions(); 3659 InstanceKlass* last = this; 3660 int version = 0; 3661 3662 // check the previous versions list 3663 for (; pv_node != NULL; ) { 3664 3665 ConstantPool* pvcp = pv_node->constants(); 3666 assert(pvcp != NULL, "cp ref was unexpectedly cleared"); 3667 3668 if (!pvcp->on_stack()) { 3669 // If the constant pool isn't on stack, none of the methods 3670 // are executing. Unlink this previous_version. 3671 // The previous version InstanceKlass is on the ClassLoaderData deallocate list 3672 // so will be deallocated during the next phase of class unloading. 3673 log_trace(redefine, class, iklass, purge) 3674 ("previous version " INTPTR_FORMAT " is dead.", p2i(pv_node)); 3675 // For debugging purposes. 3676 pv_node->set_is_scratch_class(); 3677 // Unlink from previous version list. 3678 assert(pv_node->class_loader_data() == loader_data, "wrong loader_data"); 3679 InstanceKlass* next = pv_node->previous_versions(); 3680 pv_node->link_previous_versions(NULL); // point next to NULL 3681 last->link_previous_versions(next); 3682 // Add to the deallocate list after unlinking 3683 loader_data->add_to_deallocate_list(pv_node); 3684 pv_node = next; 3685 deleted_count++; 3686 version++; 3687 continue; 3688 } else { 3689 log_trace(redefine, class, iklass, purge)("previous version " INTPTR_FORMAT " is alive", p2i(pv_node)); 3690 assert(pvcp->pool_holder() != NULL, "Constant pool with no holder"); 3691 guarantee (!loader_data->is_unloading(), "unloaded classes can't be on the stack"); 3692 live_count++; 3693 // found a previous version for next time we do class unloading 3694 _has_previous_versions = true; 3695 } 3696 3697 // At least one method is live in this previous version. 3698 // Reset dead EMCP methods not to get breakpoints. 3699 // All methods are deallocated when all of the methods for this class are no 3700 // longer running. 
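// The loop below clears the running_emcp bit of any method in this previous
// version that is no longer on a stack, so breakpoints are not installed in
// EMCP methods that can no longer run.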
3701 Array<Method*>* method_refs = pv_node->methods(); 3702 if (method_refs != NULL) { 3703 log_trace(redefine, class, iklass, purge)("previous methods length=%d", method_refs->length()); 3704 for (int j = 0; j < method_refs->length(); j++) { 3705 Method* method = method_refs->at(j); 3706 3707 if (!method->on_stack()) { 3708 // no breakpoints for non-running methods 3709 if (method->is_running_emcp()) { 3710 method->set_running_emcp(false); 3711 } 3712 } else { 3713 assert (method->is_obsolete() || method->is_running_emcp(), 3714 "emcp method cannot run after emcp bit is cleared"); 3715 log_trace(redefine, class, iklass, purge) 3716 ("purge: %s(%s): prev method @%d in version @%d is alive", 3717 method->name()->as_C_string(), method->signature()->as_C_string(), j, version); 3718 } 3719 } 3720 } 3721 // next previous version 3722 last = pv_node; 3723 pv_node = pv_node->previous_versions(); 3724 version++; 3725 } 3726 log_trace(redefine, class, iklass, purge) 3727 ("previous version stats: live=%d, deleted=%d", live_count, deleted_count); 3728 } 3729 3730 void InstanceKlass::mark_newly_obsolete_methods(Array<Method*>* old_methods, 3731 int emcp_method_count) { 3732 int obsolete_method_count = old_methods->length() - emcp_method_count; 3733 3734 if (emcp_method_count != 0 && obsolete_method_count != 0 && 3735 _previous_versions != NULL) { 3736 // We have a mix of obsolete and EMCP methods so we have to 3737 // clear out any matching EMCP method entries the hard way. 3738 int local_count = 0; 3739 for (int i = 0; i < old_methods->length(); i++) { 3740 Method* old_method = old_methods->at(i); 3741 if (old_method->is_obsolete()) { 3742 // only obsolete methods are interesting 3743 Symbol* m_name = old_method->name(); 3744 Symbol* m_signature = old_method->signature(); 3745 3746 // previous versions are linked together through the InstanceKlass 3747 int j = 0; 3748 for (InstanceKlass* prev_version = _previous_versions; 3749 prev_version != NULL; 3750 prev_version = prev_version->previous_versions(), j++) { 3751 3752 Array<Method*>* method_refs = prev_version->methods(); 3753 for (int k = 0; k < method_refs->length(); k++) { 3754 Method* method = method_refs->at(k); 3755 3756 if (!method->is_obsolete() && 3757 method->name() == m_name && 3758 method->signature() == m_signature) { 3759 // The current RedefineClasses() call has made all EMCP 3760 // versions of this method obsolete so mark it as obsolete 3761 log_trace(redefine, class, iklass, add) 3762 ("%s(%s): flush obsolete method @%d in version @%d", 3763 m_name->as_C_string(), m_signature->as_C_string(), k, j); 3764 3765 method->set_is_obsolete(); 3766 break; 3767 } 3768 } 3769 3770 // The previous loop may not find a matching EMCP method, but 3771 // that doesn't mean that we can optimize and not go any 3772 // further back in the PreviousVersion generations. The EMCP 3773 // method for this generation could have already been made obsolete, 3774 // but there still may be an older EMCP method that has not 3775 // been made obsolete. 3776 } 3777 3778 if (++local_count >= obsolete_method_count) { 3779 // no more obsolete methods so bail out now 3780 break; 3781 } 3782 } 3783 } 3784 } 3785 } 3786 3787 // Save the scratch_class as the previous version if any of the methods are running. 3788 // The previous_versions are used to set breakpoints in EMCP methods and they are 3789 // also used to clean MethodData links to redefined methods that are no longer running. 
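// add_previous_version() first purges the existing list and marks newly
// obsolete methods in the remaining versions. If the scratch class' constant
// pool is not on any stack, the class goes straight to the loader data's
// deallocate list; otherwise it is linked at the head of the previous-version
// chain and _has_previous_versions is set.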
3790 void InstanceKlass::add_previous_version(InstanceKlass* scratch_class, 3791 int emcp_method_count) { 3792 assert(Thread::current()->is_VM_thread(), 3793 "only VMThread can add previous versions"); 3794 3795 ResourceMark rm; 3796 log_trace(redefine, class, iklass, add) 3797 ("adding previous version ref for %s, EMCP_cnt=%d", scratch_class->external_name(), emcp_method_count); 3798 3799 // Clean out old previous versions for this class 3800 purge_previous_version_list(); 3801 3802 // Mark newly obsolete methods in remaining previous versions. An EMCP method from 3803 // a previous redefinition may be made obsolete by this redefinition. 3804 Array<Method*>* old_methods = scratch_class->methods(); 3805 mark_newly_obsolete_methods(old_methods, emcp_method_count); 3806 3807 // If the constant pool for this previous version of the class 3808 // is not marked as being on the stack, then none of the methods 3809 // in this previous version of the class are on the stack so 3810 // we don't need to add this as a previous version. 3811 ConstantPool* cp_ref = scratch_class->constants(); 3812 if (!cp_ref->on_stack()) { 3813 log_trace(redefine, class, iklass, add)("scratch class not added; no methods are running"); 3814 // For debugging purposes. 3815 scratch_class->set_is_scratch_class(); 3816 scratch_class->class_loader_data()->add_to_deallocate_list(scratch_class); 3817 return; 3818 } 3819 3820 if (emcp_method_count != 0) { 3821 // At least one method is still running, check for EMCP methods 3822 for (int i = 0; i < old_methods->length(); i++) { 3823 Method* old_method = old_methods->at(i); 3824 if (!old_method->is_obsolete() && old_method->on_stack()) { 3825 // if EMCP method (not obsolete) is on the stack, mark as EMCP so that 3826 // we can add breakpoints for it. 3827 3828 // We set the method->on_stack bit during safepoints for class redefinition 3829 // and use this bit to set the is_running_emcp bit. 3830 // After the safepoint, the on_stack bit is cleared and the running emcp 3831 // method may exit. If so, we would set a breakpoint in a method that 3832 // is never reached, but this won't be noticeable to the programmer. 3833 old_method->set_running_emcp(true); 3834 log_trace(redefine, class, iklass, add) 3835 ("EMCP method %s is on_stack " INTPTR_FORMAT, old_method->name_and_sig_as_C_string(), p2i(old_method)); 3836 } else if (!old_method->is_obsolete()) { 3837 log_trace(redefine, class, iklass, add) 3838 ("EMCP method %s is NOT on_stack " INTPTR_FORMAT, old_method->name_and_sig_as_C_string(), p2i(old_method)); 3839 } 3840 } 3841 } 3842 3843 // Add previous version if any methods are still running. 3844 // Set has_previous_version flag for processing during class unloading. 
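// The scratch class is spliced in at the head of the chain: it adopts the
// current previous_versions() as its tail and then becomes the new
// previous_versions() of this klass.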
3845 _has_previous_versions = true; 3846 log_trace(redefine, class, iklass, add) ("scratch class added; one of its methods is on_stack."); 3847 assert(scratch_class->previous_versions() == NULL, "shouldn't have a previous version"); 3848 scratch_class->link_previous_versions(previous_versions()); 3849 link_previous_versions(scratch_class); 3850 } // end add_previous_version() 3851 3852 #endif // INCLUDE_JVMTI 3853 3854 Method* InstanceKlass::method_with_idnum(int idnum) { 3855 Method* m = NULL; 3856 if (idnum < methods()->length()) { 3857 m = methods()->at(idnum); 3858 } 3859 if (m == NULL || m->method_idnum() != idnum) { 3860 for (int index = 0; index < methods()->length(); ++index) { 3861 m = methods()->at(index); 3862 if (m->method_idnum() == idnum) { 3863 return m; 3864 } 3865 } 3866 // None found, return null for the caller to handle. 3867 return NULL; 3868 } 3869 return m; 3870 } 3871 3872 3873 Method* InstanceKlass::method_with_orig_idnum(int idnum) { 3874 if (idnum >= methods()->length()) { 3875 return NULL; 3876 } 3877 Method* m = methods()->at(idnum); 3878 if (m != NULL && m->orig_method_idnum() == idnum) { 3879 return m; 3880 } 3881 // Obsolete method idnum does not match the original idnum 3882 for (int index = 0; index < methods()->length(); ++index) { 3883 m = methods()->at(index); 3884 if (m->orig_method_idnum() == idnum) { 3885 return m; 3886 } 3887 } 3888 // None found, return null for the caller to handle. 3889 return NULL; 3890 } 3891 3892 3893 Method* InstanceKlass::method_with_orig_idnum(int idnum, int version) { 3894 InstanceKlass* holder = get_klass_version(version); 3895 if (holder == NULL) { 3896 return NULL; // The version of klass is gone, no method is found 3897 } 3898 Method* method = holder->method_with_orig_idnum(idnum); 3899 return method; 3900 } 3901 3902 #if INCLUDE_JVMTI 3903 JvmtiCachedClassFileData* InstanceKlass::get_cached_class_file() { 3904 if (MetaspaceShared::is_in_shared_metaspace(_cached_class_file)) { 3905 // Ignore the archived class stream data 3906 return NULL; 3907 } else { 3908 return _cached_class_file; 3909 } 3910 } 3911 3912 jint InstanceKlass::get_cached_class_file_len() { 3913 return VM_RedefineClasses::get_cached_class_file_len(_cached_class_file); 3914 } 3915 3916 unsigned char * InstanceKlass::get_cached_class_file_bytes() { 3917 return VM_RedefineClasses::get_cached_class_file_bytes(_cached_class_file); 3918 } 3919 3920 #if INCLUDE_CDS 3921 JvmtiCachedClassFileData* InstanceKlass::get_archived_class_data() { 3922 if (DumpSharedSpaces) { 3923 return _cached_class_file; 3924 } else { 3925 assert(this->is_shared(), "class should be shared"); 3926 if (MetaspaceShared::is_in_shared_metaspace(_cached_class_file)) { 3927 return _cached_class_file; 3928 } else { 3929 return NULL; 3930 } 3931 } 3932 } 3933 #endif 3934 #endif 3935 3936 #define THROW_DVT_ERROR(s) \ 3937 Exceptions::fthrow(THREAD_AND_LOCATION, vmSymbols::java_lang_IncompatibleClassChangeError(), \ 3938 "ValueCapableClass class '%s' %s", external_name(),(s)); \ 3939 return