1 /* 2 * Copyright (c) 1997, 2016, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 
 *
 */

#include "precompiled.hpp"
#include "classfile/classFileParser.hpp"
#include "classfile/classFileStream.hpp"
#include "classfile/javaClasses.hpp"
#include "classfile/systemDictionary.hpp"
#include "classfile/verifier.hpp"
#include "classfile/vmSymbols.hpp"
#include "code/dependencyContext.hpp"
#include "compiler/compileBroker.hpp"
#include "gc/shared/collectedHeap.inline.hpp"
#include "gc/shared/specialized_oop_closures.hpp"
#include "interpreter/oopMapCache.hpp"
#include "interpreter/rewriter.hpp"
#include "jvmtifiles/jvmti.h"
#include "logging/log.hpp"
#include "memory/heapInspection.hpp"
#include "memory/iterator.inline.hpp"
#include "memory/metadataFactory.hpp"
#include "memory/oopFactory.hpp"
#include "oops/fieldStreams.hpp"
#include "oops/instanceClassLoaderKlass.hpp"
#include "oops/instanceKlass.inline.hpp"
#include "oops/instanceMirrorKlass.hpp"
#include "oops/instanceOop.hpp"
#include "oops/klass.inline.hpp"
#include "oops/method.hpp"
#include "oops/oop.inline.hpp"
#include "oops/symbol.hpp"
#include "oops/valueKlass.hpp"
#include "prims/jvmtiExport.hpp"
#include "prims/jvmtiRedefineClasses.hpp"
#include "prims/jvmtiRedefineClassesTrace.hpp"
#include "prims/jvmtiThreadState.hpp"
#include "prims/methodComparator.hpp"
#include "runtime/atomic.inline.hpp"
#include "runtime/fieldDescriptor.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/javaCalls.hpp"
#include "runtime/mutexLocker.hpp"
#include "runtime/orderAccess.inline.hpp"
#include "runtime/thread.inline.hpp"
#include "services/classLoadingService.hpp"
#include "services/threadService.hpp"
#include "utilities/dtrace.hpp"
#include "utilities/macros.hpp"
#include "logging/log.hpp"  // NOTE(review): duplicate of the logging/log.hpp include above
#ifdef COMPILER1
#include "c1/c1_Compiler.hpp"
#endif

#ifdef DTRACE_ENABLED

// The generated SDT probe macros are upper case; the DTRACE_CLASSINIT_PROBE
// call sites below paste a lower-case 'type' suffix, so alias each lower-case
// probe name to its generated upper-case macro.
#define HOTSPOT_CLASS_INITIALIZATION_required HOTSPOT_CLASS_INITIALIZATION_REQUIRED
#define HOTSPOT_CLASS_INITIALIZATION_recursive HOTSPOT_CLASS_INITIALIZATION_RECURSIVE
#define HOTSPOT_CLASS_INITIALIZATION_concurrent HOTSPOT_CLASS_INITIALIZATION_CONCURRENT
#define HOTSPOT_CLASS_INITIALIZATION_erroneous HOTSPOT_CLASS_INITIALIZATION_ERRONEOUS
#define HOTSPOT_CLASS_INITIALIZATION_super__failed HOTSPOT_CLASS_INITIALIZATION_SUPER_FAILED
#define HOTSPOT_CLASS_INITIALIZATION_clinit HOTSPOT_CLASS_INITIALIZATION_CLINIT
#define HOTSPOT_CLASS_INITIALIZATION_error HOTSPOT_CLASS_INITIALIZATION_ERROR
#define HOTSPOT_CLASS_INITIALIZATION_end HOTSPOT_CLASS_INITIALIZATION_END

// Fire the class-initialization probe 'type' with the class name (UTF-8 bytes
// and length, NULL/0 if the klass has no name symbol), its class loader oop,
// and the requesting thread type.
#define DTRACE_CLASSINIT_PROBE(type, clss, thread_type)          \
  {                                                              \
    char* data = NULL;                                           \
    int len = 0;                                                 \
    Symbol* name = (clss)->name();                               \
    if (name != NULL) {                                          \
      data = (char*)name->bytes();                               \
      len = name->utf8_length();                                 \
    }                                                            \
    HOTSPOT_CLASS_INITIALIZATION_##type(                         \
      data, len, (clss)->class_loader(), thread_type);           \
  }

// Same as DTRACE_CLASSINIT_PROBE, with an extra 'wait' argument recording
// whether the caller had to wait on the initialization lock.
#define DTRACE_CLASSINIT_PROBE_WAIT(type, clss, thread_type, wait) \
  {                                                              \
    char* data = NULL;                                           \
    int len = 0;                                                 \
    Symbol* name = (clss)->name();                               \
    if (name != NULL) {                                          \
      data = (char*)name->bytes();                               \
      len = name->utf8_length();                                 \
    }                                                            \
    HOTSPOT_CLASS_INITIALIZATION_##type(                         \
      data, len, (clss)->class_loader(), thread_type, wait);     \
  }

#else //  ndef DTRACE_ENABLED

#define DTRACE_CLASSINIT_PROBE(type, clss, thread_type)
#define DTRACE_CLASSINIT_PROBE_WAIT(type, clss, thread_type, wait)

#endif //  ndef DTRACE_ENABLED

// Running count of InstanceKlasses; incremented atomically in
// allocate_instance_klass().
volatile int InstanceKlass::_total_instanceKlass_count = 0;

// Returns true if the class being parsed is java.lang.ClassLoader itself or
// (once the ClassLoader klass has been loaded) a subtype of it, judged by the
// parser's super klass.
static inline bool is_class_loader(const Symbol* class_name,
                                   const ClassFileParser& parser) {
  assert(class_name != NULL, "invariant");

  if (class_name == vmSymbols::java_lang_ClassLoader()) {
    return true;
  }

  if (SystemDictionary::ClassLoader_klass_loaded()) {
    const Klass* const super_klass = parser.super_klass();
    if (super_klass != NULL) {
      if (super_klass->is_subtype_of(SystemDictionary::ClassLoader_klass())) {
        return true;
      }
    }
  }
  return false;
}

// Allocates a new InstanceKlass — or the appropriate subclass: mirror
// (InstanceMirrorKlass), class loader (InstanceClassLoaderKlass), value type
// (ValueKlass), or reference (InstanceRefKlass) — in the metaspace of the
// parser's ClassLoaderData, and registers it with that loader data.
// Returns NULL with a pending exception on allocation failure.
InstanceKlass* InstanceKlass::allocate_instance_klass(const ClassFileParser& parser, TRAPS) {
  const int size = InstanceKlass::size(parser.vtable_size(),
                                       parser.itable_size(),
                                       nonstatic_oop_map_size(parser.total_oop_map_count()),
                                       parser.is_interface(),
                                       parser.is_anonymous());

  const Symbol* const class_name = parser.class_name();
  assert(class_name != NULL, "invariant");
  ClassLoaderData* loader_data = parser.loader_data();
  assert(loader_data != NULL, "invariant");

  InstanceKlass* ik;

  // Allocation
  if (REF_NONE == parser.reference_type()) {
    if (class_name == vmSymbols::java_lang_Class()) {
      // mirror
      ik = new (loader_data, size, THREAD) InstanceMirrorKlass(parser);
    }
    else if (is_class_loader(class_name, parser)) {
      // class loader
      ik = new (loader_data, size, THREAD) InstanceClassLoaderKlass(parser);
    }
    else if (parser.is_value_type()) {
      // value type
      ik = new (loader_data, size, THREAD) ValueKlass(parser);
    }
    else {
      // normal
      ik = new (loader_data, size, THREAD) InstanceKlass(parser, InstanceKlass::_misc_kind_other);
    }
  }
  else {
    // reference
    ik = new (loader_data, size, THREAD) InstanceRefKlass(parser);
  }

  // Check for pending exception before adding to the loader data and incrementing
  // class count.  Can get OOM here.
  if (HAS_PENDING_EXCEPTION) {
    return NULL;
  }
  assert(ik != NULL, "invariant");

  const bool publicize = !parser.is_internal();
#ifdef ASSERT
  assert(ik->size() == size, "");
  ik->bounds_check((address) ik->start_of_vtable(), false, size);
  ik->bounds_check((address) ik->start_of_itable(), false, size);
  ik->bounds_check((address) ik->end_of_itable(), true, size);
  ik->bounds_check((address) ik->end_of_nonstatic_oop_maps(), true, size);
#endif //ASSERT

  // Add all classes to our internal class loader list here,
  // including classes in the bootstrap (NULL) class loader.
  loader_data->add_class(ik, publicize);
  Atomic::inc(&_total_instanceKlass_count);

  return ik;
}

#ifndef PRODUCT
// Debug-only sanity check that 'addr' lies within this klass's footprint.
// size_in_bytes < 0 means "use size()"; sizes are in words, scaled by
// sizeof(intptr_t) below. 'edge_ok' permits addr to land exactly on either
// boundary. On violation, prints diagnostics and returns false.
bool InstanceKlass::bounds_check(address addr, bool edge_ok, intptr_t size_in_bytes) const {
  const char* bad = NULL;
  address end = NULL;
  if (addr < (address)this) {
    bad = "before";
  } else if (addr == (address)this) {
    if (edge_ok)  return true;
    bad = "just before";
  } else if (addr == (end = (address)this + sizeof(intptr_t) * (size_in_bytes < 0 ? size() : size_in_bytes))) {
    if (edge_ok)  return true;
    bad = "just after";
  } else if (addr > end) {
    bad = "after";
  } else {
    return true;
  }
  tty->print_cr("%s object bounds: " INTPTR_FORMAT " [" INTPTR_FORMAT ".." INTPTR_FORMAT "]",
                bad, (intptr_t)addr, (intptr_t)this, (intptr_t)end);
  Verbose = WizardMode = true;  this->print(); //@@
  return false;
}
#endif //PRODUCT

// copy method ordering from resource area to Metaspace
void InstanceKlass::copy_method_ordering(const intArray* m, TRAPS) {
  if (m != NULL) {
    // allocate a new array and copy contents (memcpy?)
    _method_ordering = MetadataFactory::new_array<int>(class_loader_data(), m->length(), CHECK);
    for (int i = 0; i < m->length(); i++) {
      _method_ordering->at_put(i, m->at(i));
    }
  } else {
    // no ordering recorded: share the canonical empty array
    _method_ordering = Universe::the_empty_int_array();
  }
}

// create a new array of vtable_indices for default methods
Array<int>* InstanceKlass::create_new_default_vtable_indices(int len, TRAPS) {
  Array<int>* vtable_indices = MetadataFactory::new_array<int>(class_loader_data(), len, CHECK_NULL);
  assert(default_vtable_indices() == NULL, "only create once");
  set_default_vtable_indices(vtable_indices);
  return vtable_indices;
}

// Constructor: sizes and flags come straight from the class file parser.
// The underlying metaspace memory is expected to be pre-zeroed (asserted
// below via _methods).
InstanceKlass::InstanceKlass(const ClassFileParser& parser, unsigned kind) :
  _static_field_size(parser.static_field_size()),
  _nonstatic_oop_map_size(nonstatic_oop_map_size(parser.total_oop_map_count())),
  _itable_len(parser.itable_size()),
  _reference_type(parser.reference_type()) {
    set_vtable_length(parser.vtable_size());
    set_kind(kind);
    set_access_flags(parser.access_flags());
    set_is_anonymous(parser.is_anonymous());
    set_layout_helper(Klass::instance_layout_helper(parser.layout_size(),
                                                    false));

    assert(NULL == _methods, "underlying memory not zeroed?");
    assert(is_instance_klass(), "is layout incorrect?");
    assert(size_helper() == parser.layout_size(), "incorrect size_helper?");
}

// Frees every Method* and the methods array itself, unless the array is the
// canonical empty array or lives in the shared (CDS) space.
void InstanceKlass::deallocate_methods(ClassLoaderData* loader_data,
                                       Array<Method*>* methods) {
  if (methods != NULL && methods != Universe::the_empty_method_array() &&
      !methods->is_shared()) {
    for (int i = 0; i < methods->length(); i++) {
      Method* method = methods->at(i);
      if (method == NULL) continue;  // maybe null if error processing
      // Only want to delete methods that are not executing for RedefineClasses.
      // The previous version will point to them so they're not totally dangling
      assert (!method->on_stack(), "shouldn't be called with methods on stack");
      MetadataFactory::free_metadata(loader_data, method);
    }
    MetadataFactory::free_array<Method*>(loader_data, methods);
  }
}

// Frees the local and transitive interface arrays, taking care not to free
// arrays that are empty, shared (CDS), aliased to each other, or inherited
// from the super class.
void InstanceKlass::deallocate_interfaces(ClassLoaderData* loader_data,
                                          const Klass* super_klass,
                                          Array<Klass*>* local_interfaces,
                                          Array<Klass*>* transitive_interfaces) {
  // Only deallocate transitive interfaces if not empty, same as super class
  // or same as local interfaces.  See code in parseClassFile.
  Array<Klass*>* ti = transitive_interfaces;
  if (ti != Universe::the_empty_klass_array() && ti != local_interfaces) {
    // check that the interfaces don't come from super class
    Array<Klass*>* sti = (super_klass == NULL) ? NULL :
                         InstanceKlass::cast(super_klass)->transitive_interfaces();
    if (ti != sti && ti != NULL && !ti->is_shared()) {
      MetadataFactory::free_array<Klass*>(loader_data, ti);
    }
  }

  // local interfaces can be empty
  if (local_interfaces != Universe::the_empty_klass_array() &&
      local_interfaces != NULL && !local_interfaces->is_shared()) {
    MetadataFactory::free_array<Klass*>(loader_data, local_interfaces);
  }
}

// This function deallocates the metadata and C heap pointers that the
// InstanceKlass points to.
void InstanceKlass::deallocate_contents(ClassLoaderData* loader_data) {

  // Orphan the mirror first, CMS thinks it's still live.
  if (java_mirror() != NULL) {
    java_lang_Class::set_klass(java_mirror(), NULL);
  }

  // Need to take this class off the class loader data list.
  loader_data->remove_class(this);

  // The array_klass for this class is created later, after error handling.
  // For class redefinition, we keep the original class so this scratch class
  // doesn't have an array class.
  // Either way, assert that there is nothing
  // to deallocate.
  assert(array_klasses() == NULL, "array classes shouldn't be created for this class yet");

  // Release C heap allocated data that this might point to, which includes
  // reference counting symbol names.
  release_C_heap_structures();

  deallocate_methods(loader_data, methods());
  set_methods(NULL);

  if (method_ordering() != NULL &&
      method_ordering() != Universe::the_empty_int_array() &&
      !method_ordering()->is_shared()) {
    MetadataFactory::free_array<int>(loader_data, method_ordering());
  }
  set_method_ordering(NULL);

  // default methods can be empty
  if (default_methods() != NULL &&
      default_methods() != Universe::the_empty_method_array() &&
      !default_methods()->is_shared()) {
    MetadataFactory::free_array<Method*>(loader_data, default_methods());
  }
  // Do NOT deallocate the default methods, they are owned by superinterfaces.
  set_default_methods(NULL);

  // default methods vtable indices can be empty
  if (default_vtable_indices() != NULL &&
      !default_vtable_indices()->is_shared()) {
    MetadataFactory::free_array<int>(loader_data, default_vtable_indices());
  }
  set_default_vtable_indices(NULL);


  // This array is in Klass, but remove it with the InstanceKlass since
  // this place would be the only caller and it can share memory with transitive
  // interfaces.
  if (secondary_supers() != NULL &&
      secondary_supers() != Universe::the_empty_klass_array() &&
      secondary_supers() != transitive_interfaces() &&
      !secondary_supers()->is_shared()) {
    MetadataFactory::free_array<Klass*>(loader_data, secondary_supers());
  }
  set_secondary_supers(NULL);

  deallocate_interfaces(loader_data, super(), local_interfaces(), transitive_interfaces());
  set_transitive_interfaces(NULL);
  set_local_interfaces(NULL);

  if (fields() != NULL && !fields()->is_shared()) {
    MetadataFactory::free_array<jushort>(loader_data, fields());
  }
  set_fields(NULL, 0);

  // If a method from a redefined class is using this constant pool, don't
  // delete it, yet. The new class's previous version will point to this.
  if (constants() != NULL) {
    assert (!constants()->on_stack(), "shouldn't be called if anything is onstack");
    if (!constants()->is_shared()) {
      MetadataFactory::free_metadata(loader_data, constants());
    }
    // Delete any cached resolution errors for the constant pool
    SystemDictionary::delete_resolution_error(constants());

    set_constants(NULL);
  }

  if (inner_classes() != NULL &&
      inner_classes() != Universe::the_empty_short_array() &&
      !inner_classes()->is_shared()) {
    MetadataFactory::free_array<jushort>(loader_data, inner_classes());
  }
  set_inner_classes(NULL);

  // We should deallocate the Annotations instance if it's not in shared spaces.
393 if (annotations() != NULL && !annotations()->is_shared()) { 394 MetadataFactory::free_metadata(loader_data, annotations()); 395 } 396 set_annotations(NULL); 397 } 398 399 bool InstanceKlass::should_be_initialized() const { 400 return !is_initialized(); 401 } 402 403 klassItable* InstanceKlass::itable() const { 404 return new klassItable(instanceKlassHandle(this)); 405 } 406 407 void InstanceKlass::eager_initialize(Thread *thread) { 408 if (!EagerInitialization) return; 409 410 if (this->is_not_initialized()) { 411 // abort if the the class has a class initializer 412 if (this->class_initializer() != NULL) return; 413 414 // abort if it is java.lang.Object (initialization is handled in genesis) 415 Klass* super = this->super(); 416 if (super == NULL) return; 417 418 // abort if the super class should be initialized 419 if (!InstanceKlass::cast(super)->is_initialized()) return; 420 421 // call body to expose the this pointer 422 instanceKlassHandle this_k(thread, this); 423 eager_initialize_impl(this_k); 424 } 425 } 426 427 // JVMTI spec thinks there are signers and protection domain in the 428 // instanceKlass. These accessors pretend these fields are there. 429 // The hprof specification also thinks these fields are in InstanceKlass. 430 oop InstanceKlass::protection_domain() const { 431 // return the protection_domain from the mirror 432 return java_lang_Class::protection_domain(java_mirror()); 433 } 434 435 // To remove these from requires an incompatible change and CCC request. 
objArrayOop InstanceKlass::signers() const {
  // return the signers from the mirror
  return java_lang_Class::signers(java_mirror());
}

oop InstanceKlass::init_lock() const {
  // return the init lock from the mirror
  oop lock = java_lang_Class::init_lock(java_mirror());
  // Prevent reordering with any access of initialization state
  OrderAccess::loadload();
  assert((oop)lock != NULL || !is_not_initialized(),  // initialized or in_error state
         "only fully initialized state can have a null lock");
  return lock;
}

// Set the initialization lock to null so the object can be GC'ed.  Any racing
// threads to get this lock will see a null lock and will not lock.
// That's okay because they all check for initialized state after getting
// the lock and return.
void InstanceKlass::fence_and_clear_init_lock() {
  // make sure previous stores are all done, notably the init_state.
  OrderAccess::storestore();
  java_lang_Class::set_init_lock(java_mirror(), NULL);
  assert(!is_not_initialized(), "class must be initialized now");
}

// Body of eager_initialize(): links the class under the init lock and, on
// success, marks it fully_initialized without running any <clinit> (callers
// guarantee there is none).
void InstanceKlass::eager_initialize_impl(instanceKlassHandle this_k) {
  EXCEPTION_MARK;
  oop init_lock = this_k->init_lock();
  ObjectLocker ol(init_lock, THREAD, init_lock != NULL);

  // abort if someone beat us to the initialization
  if (!this_k->is_not_initialized()) return;  // note: not equivalent to is_initialized()

  ClassState old_state = this_k->init_state();
  link_class_impl(this_k, true, THREAD);
  if (HAS_PENDING_EXCEPTION) {
    CLEAR_PENDING_EXCEPTION;
    // Abort if linking the class throws an exception.

    // Use a test to avoid redundantly resetting the state if there's
    // no change.  Set_init_state() asserts that state changes make
    // progress, whereas here we might just be spinning in place.
    if( old_state != this_k->_init_state )
      this_k->set_init_state (old_state);
  } else {
    // linking successful, mark class as initialized
    this_k->set_init_state (fully_initialized);
    this_k->fence_and_clear_init_lock();
    // trace
    if (log_is_enabled(Info, classinit)) {
      ResourceMark rm(THREAD);
      log_info(classinit)("[Initialized %s without side effects]", this_k->external_name());
    }
  }
}


// See "The Virtual Machine Specification" section 2.16.5 for a detailed explanation of the class initialization
// process. The step comments refers to the procedure described in that section.
// Note: implementation moved to static method to expose the this pointer.
void InstanceKlass::initialize(TRAPS) {
  if (this->should_be_initialized()) {
    HandleMark hm(THREAD);
    instanceKlassHandle this_k(THREAD, this);
    initialize_impl(this_k, CHECK);
    // Note: at this point the class may be initialized
    //       OR it may be in the state of being initialized
    //       in case of recursive initialization!
  } else {
    assert(is_initialized(), "sanity check");
  }
}


// Runs the bytecode verifier over this class; 'throw_verifyerror' selects
// whether a failure raises VerifyError or just returns false.
bool InstanceKlass::verify_code(
    instanceKlassHandle this_k, bool throw_verifyerror, TRAPS) {
  // 1) Verify the bytecodes
  Verifier::Mode mode =
    throw_verifyerror ? Verifier::ThrowException : Verifier::NoException;
  return Verifier::verify(this_k, mode, this_k->should_verify_class(), THREAD);
}


// Used exclusively by the shared spaces dump mechanism to prevent
// classes mapped into the shared regions in new VMs from appearing linked.

void InstanceKlass::unlink_class() {
  assert(is_linked(), "must be linked");
  _init_state = loaded;
}

void InstanceKlass::link_class(TRAPS) {
  assert(is_loaded(), "must be loaded");
  if (!is_linked()) {
    HandleMark hm(THREAD);
    instanceKlassHandle this_k(THREAD, this);
    link_class_impl(this_k, true, CHECK);
  }
}

// Called to verify that a class can link during initialization, without
// throwing a VerifyError.
bool InstanceKlass::link_class_or_fail(TRAPS) {
  assert(is_loaded(), "must be loaded");
  if (!is_linked()) {
    HandleMark hm(THREAD);
    instanceKlassHandle this_k(THREAD, this);
    link_class_impl(this_k, false, CHECK_false);
  }
  return is_linked();
}

// Links this class: recursively links the super class and local interfaces,
// resolves value type field classes, then (under the init lock) verifies,
// rewrites, links methods, and initializes the vtable/itable before setting
// the state to 'linked'. Returns false (or throws, per throw_verifyerror)
// on failure.
bool InstanceKlass::link_class_impl(
    instanceKlassHandle this_k, bool throw_verifyerror, TRAPS) {
  // check for error state
  if (this_k->is_in_error_state()) {
    ResourceMark rm(THREAD);
    THROW_MSG_(vmSymbols::java_lang_NoClassDefFoundError(),
               this_k->external_name(), false);
  }
  // return if already verified
  if (this_k->is_linked()) {
    return true;
  }

  // Timing
  // timer handles recursion
  assert(THREAD->is_Java_thread(), "non-JavaThread in link_class_impl");
  JavaThread* jt = (JavaThread*)THREAD;

  // link super class before linking this class
  instanceKlassHandle super(THREAD, this_k->super());
  if (super.not_null()) {
    if (super->is_interface()) {  // check if super class is an interface
      ResourceMark rm(THREAD);
      Exceptions::fthrow(
        THREAD_AND_LOCATION,
        vmSymbols::java_lang_IncompatibleClassChangeError(),
        "class %s has interface %s as super class",
        this_k->external_name(),
        super->external_name()
      );
      return false;
    }

    link_class_impl(super, throw_verifyerror, CHECK_false);
  }

  // link all interfaces implemented by this class before linking this class
  Array<Klass*>* interfaces = this_k->local_interfaces();
  int num_interfaces = interfaces->length();
  for (int index = 0; index < num_interfaces; index++) {
    HandleMark hm(THREAD);
    instanceKlassHandle ih(THREAD, interfaces->at(index));
    link_class_impl(ih, throw_verifyerror, CHECK_false);
  }

  // If a value type is referenced by a class (either as a field type or a
  // method argument or return type) this value type must be loaded during
  // the linking of this class because size and properties of the value type
  // must be known in order to be able to perform value type optimizations

  // Note: circular dependencies between value types are not handled yet

  // Note: one case is not handled yet: arrays of value types => FixMe

  // Note: the current implementation is not optimized because the search for
  // value types is performed on all classes. It would be more efficient to
  // detect value types during verification and 'tag' the classes for which
  // value type loading is required. However, this optimization won't be
  // applicable to classes that are not verified

  // First step: fields
  for (JavaFieldStream fs(this_k); !fs.done(); fs.next()) {
    ResourceMark rm(THREAD);
    if (fs.field_descriptor().field_type() == T_VALUETYPE) {
      Symbol* signature = fs.field_descriptor().signature();
      // Get current loader and protection domain first.
      oop loader = this_k->class_loader();
      oop protection_domain = this_k->protection_domain();
      bool ok = SystemDictionary::resolve_or_fail(signature,
          Handle(THREAD, loader), Handle(THREAD, protection_domain), true,
          THREAD);
      if (!ok) {
        THROW_(vmSymbols::java_lang_LinkageError(), false);
      }
    }
  }

  // Second step: methods arguments and return types
  // (disabled: scan of constant pool method signatures for value types)
//  for (int i = 0; i < this_k->constants()->length(); i++) {
//    if (this_k->constants()->tag_at(i).is_method()) {
//      Symbol* signature = this_k->constants()->signature_ref_at(i);
//      ResourceMark rm(THREAD);
//      for (SignatureStream ss(signature); !ss.is_done(); ss.next()) {
//        if (ss.type() == T_VALUETYPE) {
//          Symbol* sig = ss.as_symbol(THREAD);
//          // Get current loader and protection domain first.
//          oop loader = this_k->class_loader();
//          oop protection_domain = this_k->protection_domain();
//
//          bool ok = SystemDictionary::resolve_or_fail(sig,
//              Handle(THREAD, loader), Handle(THREAD, protection_domain), true,
//              THREAD);
//          if (!ok) {
//            THROW_(vmSymbols::java_lang_LinkageError(), false);
//          }
//        }
//      }
//    }
//  }

  // in case the class is linked in the process of linking its superclasses
  if (this_k->is_linked()) {
    return true;
  }
  // trace only the link time for this klass that includes
  // the verification time
  PerfClassTraceTime vmtimer(ClassLoader::perf_class_link_time(),
                             ClassLoader::perf_class_link_selftime(),
                             ClassLoader::perf_classes_linked(),
                             jt->get_thread_stat()->perf_recursion_counts_addr(),
                             jt->get_thread_stat()->perf_timers_addr(),
                             PerfClassTraceTime::CLASS_LINK);

  // verification & rewriting
  {
    oop init_lock = this_k->init_lock();
    ObjectLocker ol(init_lock, THREAD, init_lock != NULL);
    // rewritten will have been set if loader constraint error found
    // on an earlier link attempt
    // don't verify or rewrite if already rewritten

    if (!this_k->is_linked()) {
      if (!this_k->is_rewritten()) {
        {
          bool verify_ok = verify_code(this_k, throw_verifyerror, THREAD);
          if (!verify_ok) {
            return false;
          }
        }

        // Just in case a side-effect of verify linked this class already
        // (which can sometimes happen since the verifier loads classes
        // using custom class loaders, which are free to initialize things)
        if (this_k->is_linked()) {
          return true;
        }

        // also sets rewritten
        this_k->rewrite_class(CHECK_false);
      }

      // relocate jsrs and link methods after they are all rewritten
      this_k->link_methods(CHECK_false);

      // Initialize the vtable and interface table after
      // methods have been rewritten since rewrite may
      // fabricate new Method*s.
      // also does loader constraint checking
      if (!this_k()->is_shared()) {
        ResourceMark rm(THREAD);
        this_k->vtable()->initialize_vtable(true, CHECK_false);
        this_k->itable()->initialize_itable(true, CHECK_false);
      }
#ifdef ASSERT
      else {
        ResourceMark rm(THREAD);
        this_k->vtable()->verify(tty, true);
        // In case itable verification is ever added.
        // this_k->itable()->verify(tty, true);
      }
#endif
      // DVT/VCC linking implies linking it's partner
      if (this_k->is_derive_value_type()) {
        // Simple state check to prevent infinite link calls
        if (!this_k->derive_value_type_klass()->is_rewritten()) {
          this_k->derive_value_type_klass()->link_class(CHECK_false);
        }
      }
      this_k->set_init_state(linked);
      if (JvmtiExport::should_post_class_prepare()) {
        Thread *thread = THREAD;
        assert(thread->is_Java_thread(), "thread->is_Java_thread()");
        JvmtiExport::post_class_prepare((JavaThread *) thread, this_k());
      }
    }
  }
  return true;
}


// Rewrite the byte codes of all of the methods of a class.
// The rewriter must be called exactly once.
// Rewriting must happen after
// verification but before the first method of the class is executed.
void InstanceKlass::rewrite_class(TRAPS) {
  assert(is_loaded(), "must be loaded");
  instanceKlassHandle this_k(THREAD, this);
  if (this_k->is_rewritten()) {
    assert(this_k()->is_shared(), "rewriting an unshared class?");
    return;
  }
  Rewriter::rewrite(this_k, CHECK);
  this_k->set_rewritten();
}

// Now relocate and link method entry points after class is rewritten.
// This is outside is_rewritten flag. In case of an exception, it can be
// executed more than once.
void InstanceKlass::link_methods(TRAPS) {
  int len = methods()->length();
  for (int i = len-1; i >= 0; i--) {
    methodHandle m(THREAD, methods()->at(i));

    // Set up method entry points for compiler and interpreter.
    m->link_method(m, CHECK);
  }
}

// Eagerly initialize superinterfaces that declare default methods (concrete instance: any access)
void InstanceKlass::initialize_super_interfaces(instanceKlassHandle this_k, TRAPS) {
  if (this_k->has_default_methods()) {
    for (int i = 0; i < this_k->local_interfaces()->length(); ++i) {
      Klass* iface = this_k->local_interfaces()->at(i);
      InstanceKlass* ik = InstanceKlass::cast(iface);
      if (ik->should_be_initialized()) {
        if (ik->has_default_methods()) {
          // recurse first so that super-superinterfaces initialize before ik
          ik->initialize_super_interfaces(ik, THREAD);
        }
        // Only initialize() interfaces that "declare" concrete methods.
        // has_default_methods drives searching superinterfaces since it
        // means has_default_methods in its superinterface hierarchy
        if (!HAS_PENDING_EXCEPTION && ik->declares_default_methods()) {
          ik->initialize(THREAD);
        }
        if (HAS_PENDING_EXCEPTION) {
          Handle e(THREAD, PENDING_EXCEPTION);
          CLEAR_PENDING_EXCEPTION;
          {
            EXCEPTION_MARK;
            // Locks object, set state, and notify all waiting threads
            this_k->set_initialization_state_and_notify(
                initialization_error, THREAD);

            // ignore any exception thrown, superclass initialization error is
            // thrown below
            CLEAR_PENDING_EXCEPTION;
          }
          THROW_OOP(e());
        }
      }
    }
  }
}

void InstanceKlass::initialize_impl(instanceKlassHandle this_k, TRAPS) {
  // ensure outer VCC is initialized, possible some crafty code referred to VT 1st
  if (this_k->is_derive_value_type() && this_k->is_value()) {
    this_k->derive_value_type_klass()->initialize(CHECK);
  }
  // Make sure klass is linked (verified) before initialization
  // A class could already be verified, since it has been reflected upon.
  this_k->link_class(CHECK);

  // DTRACE_CLASSINIT_PROBE(required, this_k(), -1);

  bool wait = false;

  // refer to the JVM book page 47 for description of steps
  // Step 1
  {
    oop init_lock = this_k->init_lock();
    ObjectLocker ol(init_lock, THREAD, init_lock != NULL);

    Thread *self = THREAD; // it's passed the current thread

    // Step 2
    // If we were to use wait() instead of waitInterruptibly() then
    // we might end up throwing IE from link/symbol resolution sites
    // that aren't expected to throw. This would wreak havoc. See 6320309.
817 while(this_k->is_being_initialized() && !this_k->is_reentrant_initialization(self)) { 818 wait = true; 819 ol.waitUninterruptibly(CHECK); 820 } 821 822 // Step 3 823 if (this_k->is_being_initialized() && this_k->is_reentrant_initialization(self)) { 824 // DTRACE_CLASSINIT_PROBE_WAIT(recursive, this_k(), -1,wait); 825 return; 826 } 827 828 // Step 4 829 if (this_k->is_initialized()) { 830 // DTRACE_CLASSINIT_PROBE_WAIT(concurrent, this_k(), -1,wait); 831 return; 832 } 833 834 // Step 5 835 if (this_k->is_in_error_state()) { 836 // DTRACE_CLASSINIT_PROBE_WAIT(erroneous, this_k(), -1,wait); 837 ResourceMark rm(THREAD); 838 const char* desc = "Could not initialize class "; 839 const char* className = this_k->external_name(); 840 size_t msglen = strlen(desc) + strlen(className) + 1; 841 char* message = NEW_RESOURCE_ARRAY(char, msglen); 842 if (NULL == message) { 843 // Out of memory: can't create detailed error message 844 THROW_MSG(vmSymbols::java_lang_NoClassDefFoundError(), className); 845 } else { 846 jio_snprintf(message, msglen, "%s%s", desc, className); 847 THROW_MSG(vmSymbols::java_lang_NoClassDefFoundError(), message); 848 } 849 } 850 851 // Step 6 852 this_k->set_init_state(being_initialized); 853 this_k->set_init_thread(self); 854 } 855 856 // Step 7 857 Klass* super_klass = this_k->super(); 858 if (super_klass != NULL && !this_k->is_interface() && super_klass->should_be_initialized()) { 859 super_klass->initialize(THREAD); 860 861 if (HAS_PENDING_EXCEPTION) { 862 Handle e(THREAD, PENDING_EXCEPTION); 863 CLEAR_PENDING_EXCEPTION; 864 { 865 EXCEPTION_MARK; 866 this_k->set_initialization_state_and_notify(initialization_error, THREAD); // Locks object, set state, and notify all waiting threads 867 CLEAR_PENDING_EXCEPTION; // ignore any exception thrown, superclass initialization error is thrown below 868 } 869 // DTRACE_CLASSINIT_PROBE_WAIT(super__failed, this_k(), -1,wait); 870 THROW_OOP(e()); 871 } 872 } 873 874 // If C is an interface that declares a 
non-abstract, non-static method, 875 // the initialization of a class (not an interface) that implements C directly or 876 // indirectly. 877 // Recursively initialize any superinterfaces that declare default methods 878 // Only need to recurse if has_default_methods which includes declaring and 879 // inheriting default methods 880 if (!this_k->is_interface() && this_k->has_default_methods()) { 881 this_k->initialize_super_interfaces(this_k, CHECK); 882 } 883 884 // Step 8 885 { 886 assert(THREAD->is_Java_thread(), "non-JavaThread in initialize_impl"); 887 JavaThread* jt = (JavaThread*)THREAD; 888 // DTRACE_CLASSINIT_PROBE_WAIT(clinit, this_k(), -1,wait); 889 // Timer includes any side effects of class initialization (resolution, 890 // etc), but not recursive entry into call_class_initializer(). 891 PerfClassTraceTime timer(ClassLoader::perf_class_init_time(), 892 ClassLoader::perf_class_init_selftime(), 893 ClassLoader::perf_classes_inited(), 894 jt->get_thread_stat()->perf_recursion_counts_addr(), 895 jt->get_thread_stat()->perf_timers_addr(), 896 PerfClassTraceTime::CLASS_CLINIT); 897 this_k->call_class_initializer(THREAD); 898 } 899 900 // Step 9 901 if (!HAS_PENDING_EXCEPTION) { 902 this_k->set_initialization_state_and_notify(fully_initialized, CHECK); 903 { ResourceMark rm(THREAD); 904 debug_only(this_k->vtable()->verify(tty, true);) 905 } 906 } 907 else { 908 // Step 10 and 11 909 Handle e(THREAD, PENDING_EXCEPTION); 910 CLEAR_PENDING_EXCEPTION; 911 // JVMTI has already reported the pending exception 912 // JVMTI internal flag reset is needed in order to report ExceptionInInitializerError 913 JvmtiExport::clear_detected_exception((JavaThread*)THREAD); 914 { 915 EXCEPTION_MARK; 916 this_k->set_initialization_state_and_notify(initialization_error, THREAD); 917 CLEAR_PENDING_EXCEPTION; // ignore any exception thrown, class initialization error is thrown below 918 // JVMTI has already reported the pending exception 919 // JVMTI internal flag reset is needed in 
order to report ExceptionInInitializerError 920 JvmtiExport::clear_detected_exception((JavaThread*)THREAD); 921 } 922 // DTRACE_CLASSINIT_PROBE_WAIT(error, this_k(), -1,wait); 923 if (e->is_a(SystemDictionary::Error_klass())) { 924 THROW_OOP(e()); 925 } else { 926 JavaCallArguments args(e); 927 THROW_ARG(vmSymbols::java_lang_ExceptionInInitializerError(), 928 vmSymbols::throwable_void_signature(), 929 &args); 930 } 931 } 932 // DTRACE_CLASSINIT_PROBE_WAIT(end, this_k(), -1,wait); 933 } 934 935 936 // Note: implementation moved to static method to expose the this pointer. 937 void InstanceKlass::set_initialization_state_and_notify(ClassState state, TRAPS) { 938 instanceKlassHandle kh(THREAD, this); 939 set_initialization_state_and_notify_impl(kh, state, CHECK); 940 } 941 942 void InstanceKlass::set_initialization_state_and_notify_impl(instanceKlassHandle this_k, ClassState state, TRAPS) { 943 oop init_lock = this_k->init_lock(); 944 ObjectLocker ol(init_lock, THREAD, init_lock != NULL); 945 this_k->set_init_state(state); 946 this_k->fence_and_clear_init_lock(); 947 ol.notify_all(CHECK); 948 } 949 950 // The embedded _implementor field can only record one implementor. 951 // When there are more than one implementors, the _implementor field 952 // is set to the interface Klass* itself. Following are the possible 953 // values for the _implementor field: 954 // NULL - no implementor 955 // implementor Klass* - one implementor 956 // self - more than one implementor 957 // 958 // The _implementor field only exists for interfaces. 959 void InstanceKlass::add_implementor(Klass* k) { 960 assert(Compile_lock->owned_by_self(), ""); 961 assert(is_interface(), "not interface"); 962 // Filter out my subinterfaces. 963 // (Note: Interfaces are never on the subklass list.) 964 if (InstanceKlass::cast(k)->is_interface()) return; 965 966 // Filter out subclasses whose supers already implement me. 
967 // (Note: CHA must walk subclasses of direct implementors 968 // in order to locate indirect implementors.) 969 Klass* sk = k->super(); 970 if (sk != NULL && InstanceKlass::cast(sk)->implements_interface(this, false)) 971 // We only need to check one immediate superclass, since the 972 // implements_interface query looks at transitive_interfaces. 973 // Any supers of the super have the same (or fewer) transitive_interfaces. 974 return; 975 976 Klass* ik = implementor(); 977 if (ik == NULL) { 978 set_implementor(k); 979 } else if (ik != this) { 980 // There is already an implementor. Use itself as an indicator of 981 // more than one implementors. 982 set_implementor(this); 983 } 984 985 // The implementor also implements the transitive_interfaces 986 for (int index = 0; index < local_interfaces()->length(); index++) { 987 InstanceKlass::cast(local_interfaces()->at(index))->add_implementor(k); 988 } 989 } 990 991 void InstanceKlass::init_implementor() { 992 if (is_interface()) { 993 set_implementor(NULL); 994 } 995 } 996 997 998 void InstanceKlass::process_interfaces(Thread *thread) { 999 // link this class into the implementors list of every interface it implements 1000 for (int i = local_interfaces()->length() - 1; i >= 0; i--) { 1001 assert(local_interfaces()->at(i)->is_klass(), "must be a klass"); 1002 InstanceKlass* interf = InstanceKlass::cast(local_interfaces()->at(i)); 1003 assert(interf->is_interface(), "expected interface"); 1004 interf->add_implementor(this); 1005 } 1006 } 1007 1008 bool InstanceKlass::can_be_primary_super_slow() const { 1009 if (is_interface()) 1010 return false; 1011 else 1012 return Klass::can_be_primary_super_slow(); 1013 } 1014 1015 GrowableArray<Klass*>* InstanceKlass::compute_secondary_supers(int num_extra_slots) { 1016 // The secondaries are the implemented interfaces. 
1017 Array<Klass*>* interfaces = transitive_interfaces(); 1018 int num_secondaries = num_extra_slots + interfaces->length(); 1019 if (num_secondaries == 0) { 1020 // Must share this for correct bootstrapping! 1021 set_secondary_supers(Universe::the_empty_klass_array()); 1022 return NULL; 1023 } else if (num_extra_slots == 0) { 1024 // The secondary super list is exactly the same as the transitive interfaces. 1025 // Redefine classes has to be careful not to delete this! 1026 set_secondary_supers(interfaces); 1027 return NULL; 1028 } else { 1029 // Copy transitive interfaces to a temporary growable array to be constructed 1030 // into the secondary super list with extra slots. 1031 GrowableArray<Klass*>* secondaries = new GrowableArray<Klass*>(interfaces->length()); 1032 for (int i = 0; i < interfaces->length(); i++) { 1033 secondaries->push(interfaces->at(i)); 1034 } 1035 return secondaries; 1036 } 1037 } 1038 1039 bool InstanceKlass::compute_is_subtype_of(Klass* k) { 1040 if (k->is_interface()) { 1041 return implements_interface(k, true); 1042 } else { 1043 return Klass::compute_is_subtype_of(k); 1044 } 1045 } 1046 1047 bool InstanceKlass::implements_interface(Klass* k, bool search_extras) const { 1048 if (this == k) return true; 1049 assert(k->is_interface(), "should be an interface class"); 1050 for (int i = 0; i < transitive_interfaces()->length(); i++) { 1051 if (transitive_interfaces()->at(i) == k) { 1052 return true; 1053 } 1054 } 1055 if (!(search_extras && EnableExtraSuper)) 1056 return false; 1057 1058 for (InstanceKlass* exs = extra_super(); exs != NULL; exs = exs->extra_super()) { 1059 if (exs->implements_interface(k, false)) 1060 return true; 1061 } 1062 return false; 1063 } 1064 1065 bool InstanceKlass::is_same_or_direct_interface(Klass *k) const { 1066 // Verify direct super interface 1067 if (this == k) return true; 1068 assert(k->is_interface(), "should be an interface class"); 1069 for (int i = 0; i < local_interfaces()->length(); i++) { 1070 if 
(local_interfaces()->at(i) == k) { 1071 return true; 1072 } 1073 } 1074 return false; 1075 } 1076 1077 bool InstanceKlass::can_be_used_as_extra_super(AccessFlags access_flags, 1078 Array<Method*>* methods, 1079 bool has_default_methods, 1080 Array<Klass*>* transitive_interfaces) { 1081 if (!EnableExtraSuper) return false; 1082 if (!access_flags.is_interface()) return false; 1083 bool force_itable = false; 1084 // Look for any implementation methods anywhere in this interface. 1085 if (!has_default_methods) { 1086 for (int i = 0, len = transitive_interfaces->length(); i < len; i++) { 1087 if (InstanceKlass::cast(transitive_interfaces->at(i))->has_default_methods()) { 1088 has_default_methods = true; 1089 break; 1090 } 1091 } 1092 } 1093 if (has_default_methods) 1094 force_itable = true; 1095 // We could make this logic more selective if we want. 1096 return force_itable; 1097 } 1098 1099 objArrayOop InstanceKlass::allocate_objArray(int n, int length, TRAPS) { 1100 if (length < 0) THROW_0(vmSymbols::java_lang_NegativeArraySizeException()); 1101 if (length > arrayOopDesc::max_array_length(T_OBJECT)) { 1102 report_java_out_of_memory("Requested array size exceeds VM limit"); 1103 JvmtiExport::post_array_size_exhausted(); 1104 THROW_OOP_0(Universe::out_of_memory_error_array_size()); 1105 } 1106 int size = objArrayOopDesc::object_size(length); 1107 Klass* ak = array_klass(n, CHECK_NULL); 1108 KlassHandle h_ak (THREAD, ak); 1109 objArrayOop o = 1110 (objArrayOop)CollectedHeap::array_allocate(h_ak, size, length, CHECK_NULL); 1111 return o; 1112 } 1113 1114 instanceOop InstanceKlass::register_finalizer(instanceOop i, TRAPS) { 1115 if (TraceFinalizerRegistration) { 1116 tty->print("Registered "); 1117 i->print_value_on(tty); 1118 tty->print_cr(" (" INTPTR_FORMAT ") as finalizable", p2i(i)); 1119 } 1120 instanceHandle h_i(THREAD, i); 1121 // Pass the handle as argument, JavaCalls::call expects oop as jobjects 1122 JavaValue result(T_VOID); 1123 JavaCallArguments args(h_i); 
1124 methodHandle mh (THREAD, Universe::finalizer_register_method()); 1125 JavaCalls::call(&result, mh, &args, CHECK_NULL); 1126 return h_i(); 1127 } 1128 1129 instanceOop InstanceKlass::allocate_instance(TRAPS) { 1130 bool has_finalizer_flag = has_finalizer(); // Query before possible GC 1131 int size = size_helper(); // Query before forming handle. 1132 1133 KlassHandle h_k(THREAD, this); 1134 1135 instanceOop i; 1136 1137 i = (instanceOop)CollectedHeap::obj_allocate(h_k, size, CHECK_NULL); 1138 if (has_finalizer_flag && !RegisterFinalizersAtInit) { 1139 i = register_finalizer(i, CHECK_NULL); 1140 } 1141 return i; 1142 } 1143 1144 void InstanceKlass::check_valid_for_instantiation(bool throwError, TRAPS) { 1145 if (is_interface() || is_abstract()) { 1146 ResourceMark rm(THREAD); 1147 THROW_MSG(throwError ? vmSymbols::java_lang_InstantiationError() 1148 : vmSymbols::java_lang_InstantiationException(), external_name()); 1149 } 1150 if (this == SystemDictionary::Class_klass()) { 1151 ResourceMark rm(THREAD); 1152 THROW_MSG(throwError ? 
vmSymbols::java_lang_IllegalAccessError() 1153 : vmSymbols::java_lang_IllegalAccessException(), external_name()); 1154 } 1155 } 1156 1157 Klass* InstanceKlass::array_klass_impl(bool or_null, int n, TRAPS) { 1158 instanceKlassHandle this_k(THREAD, this); 1159 return array_klass_impl(this_k, or_null, n, THREAD); 1160 } 1161 1162 Klass* InstanceKlass::array_klass_impl(instanceKlassHandle this_k, bool or_null, int n, TRAPS) { 1163 if (this_k->array_klasses() == NULL) { 1164 if (or_null) return NULL; 1165 1166 ResourceMark rm; 1167 JavaThread *jt = (JavaThread *)THREAD; 1168 { 1169 // Atomic creation of array_klasses 1170 MutexLocker mc(Compile_lock, THREAD); // for vtables 1171 MutexLocker ma(MultiArray_lock, THREAD); 1172 1173 // Check if update has already taken place 1174 if (this_k->array_klasses() == NULL) { 1175 Klass* k = ObjArrayKlass::allocate_objArray_klass(this_k->class_loader_data(), 1, this_k, CHECK_NULL); 1176 this_k->set_array_klasses(k); 1177 } 1178 } 1179 } 1180 // _this will always be set at this point 1181 ObjArrayKlass* oak = (ObjArrayKlass*)this_k->array_klasses(); 1182 if (or_null) { 1183 return oak->array_klass_or_null(n); 1184 } 1185 return oak->array_klass(n, THREAD); 1186 } 1187 1188 Klass* InstanceKlass::array_klass_impl(bool or_null, TRAPS) { 1189 return array_klass_impl(or_null, 1, THREAD); 1190 } 1191 1192 void InstanceKlass::call_class_initializer(TRAPS) { 1193 instanceKlassHandle ik (THREAD, this); 1194 call_class_initializer_impl(ik, THREAD); 1195 } 1196 1197 static int call_class_initializer_impl_counter = 0; // for debugging 1198 1199 Method* InstanceKlass::class_initializer() { 1200 Method* clinit = find_method( 1201 vmSymbols::class_initializer_name(), vmSymbols::void_method_signature()); 1202 if (clinit != NULL && clinit->has_valid_initializer_flags()) { 1203 return clinit; 1204 } 1205 return NULL; 1206 } 1207 1208 void InstanceKlass::call_class_initializer_impl(instanceKlassHandle this_k, TRAPS) { 1209 if (ReplayCompiles && 1210 
(ReplaySuppressInitializers == 1 || 1211 ReplaySuppressInitializers >= 2 && this_k->class_loader() != NULL)) { 1212 // Hide the existence of the initializer for the purpose of replaying the compile 1213 return; 1214 } 1215 1216 methodHandle h_method(THREAD, this_k->class_initializer()); 1217 assert(!this_k->is_initialized(), "we cannot initialize twice"); 1218 if (log_is_enabled(Info, classinit)) { 1219 ResourceMark rm; 1220 outputStream* log = LogHandle(classinit)::info_stream(); 1221 log->print("%d Initializing ", call_class_initializer_impl_counter++); 1222 this_k->name()->print_value_on(log); 1223 log->print_cr("%s (" INTPTR_FORMAT ")", h_method() == NULL ? "(no method)" : "", p2i(this_k())); 1224 } 1225 if (h_method() != NULL) { 1226 JavaCallArguments args; // No arguments 1227 JavaValue result(T_VOID); 1228 JavaCalls::call(&result, h_method, &args, CHECK); // Static call (no args) 1229 } 1230 } 1231 1232 1233 void InstanceKlass::mask_for(const methodHandle& method, int bci, 1234 InterpreterOopMap* entry_for) { 1235 // Dirty read, then double-check under a lock. 1236 if (_oop_map_cache == NULL) { 1237 // Otherwise, allocate a new one. 1238 MutexLocker x(OopMapCacheAlloc_lock); 1239 // First time use. Allocate a cache in C heap 1240 if (_oop_map_cache == NULL) { 1241 // Release stores from OopMapCache constructor before assignment 1242 // to _oop_map_cache. C++ compilers on ppc do not emit the 1243 // required memory barrier only because of the volatile 1244 // qualifier of _oop_map_cache. 1245 OrderAccess::release_store_ptr(&_oop_map_cache, new OopMapCache()); 1246 } 1247 } 1248 // _oop_map_cache is constant after init; lookup below does is own locking. 
1249 _oop_map_cache->lookup(method, bci, entry_for); 1250 } 1251 1252 1253 bool InstanceKlass::find_local_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const { 1254 for (JavaFieldStream fs(this); !fs.done(); fs.next()) { 1255 Symbol* f_name = fs.name(); 1256 Symbol* f_sig = fs.signature(); 1257 if (f_name == name && f_sig == sig) { 1258 fd->reinitialize(const_cast<InstanceKlass*>(this), fs.index()); 1259 return true; 1260 } 1261 } 1262 return false; 1263 } 1264 1265 1266 Klass* InstanceKlass::find_interface_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const { 1267 const int n = local_interfaces()->length(); 1268 for (int i = 0; i < n; i++) { 1269 Klass* intf1 = local_interfaces()->at(i); 1270 assert(intf1->is_interface(), "just checking type"); 1271 // search for field in current interface 1272 if (InstanceKlass::cast(intf1)->find_local_field(name, sig, fd)) { 1273 assert(fd->is_static(), "interface field must be static"); 1274 return intf1; 1275 } 1276 // search for field in direct superinterfaces 1277 Klass* intf2 = InstanceKlass::cast(intf1)->find_interface_field(name, sig, fd); 1278 if (intf2 != NULL) return intf2; 1279 } 1280 // otherwise field lookup fails 1281 return NULL; 1282 } 1283 1284 1285 Klass* InstanceKlass::find_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const { 1286 // search order according to newest JVM spec (5.4.3.2, p.167). 
1287 // 1) search for field in current klass 1288 if (find_local_field(name, sig, fd)) { 1289 return const_cast<InstanceKlass*>(this); 1290 } 1291 // 2) search for field recursively in direct superinterfaces 1292 { Klass* intf = find_interface_field(name, sig, fd); 1293 if (intf != NULL) return intf; 1294 } 1295 // 3) apply field lookup recursively if superclass exists 1296 { Klass* supr = super(); 1297 if (supr != NULL) return InstanceKlass::cast(supr)->find_field(name, sig, fd); 1298 } 1299 // 4) otherwise field lookup fails 1300 return NULL; 1301 } 1302 1303 1304 Klass* InstanceKlass::find_field(Symbol* name, Symbol* sig, bool is_static, fieldDescriptor* fd) const { 1305 // search order according to newest JVM spec (5.4.3.2, p.167). 1306 // 1) search for field in current klass 1307 if (find_local_field(name, sig, fd)) { 1308 if (fd->is_static() == is_static) return const_cast<InstanceKlass*>(this); 1309 } 1310 // 2) search for field recursively in direct superinterfaces 1311 if (is_static) { 1312 Klass* intf = find_interface_field(name, sig, fd); 1313 if (intf != NULL) return intf; 1314 } 1315 // 3) apply field lookup recursively if superclass exists 1316 { Klass* supr = super(); 1317 if (supr != NULL) return InstanceKlass::cast(supr)->find_field(name, sig, is_static, fd); 1318 } 1319 // 4) otherwise field lookup fails 1320 return NULL; 1321 } 1322 1323 1324 bool InstanceKlass::find_local_field_from_offset(int offset, bool is_static, fieldDescriptor* fd) const { 1325 for (JavaFieldStream fs(this); !fs.done(); fs.next()) { 1326 if (fs.offset() == offset) { 1327 fd->reinitialize(const_cast<InstanceKlass*>(this), fs.index()); 1328 if (fd->is_static() == is_static) return true; 1329 } 1330 } 1331 return false; 1332 } 1333 1334 1335 bool InstanceKlass::find_field_from_offset(int offset, bool is_static, fieldDescriptor* fd) const { 1336 Klass* klass = const_cast<InstanceKlass*>(this); 1337 while (klass != NULL) { 1338 if 
(InstanceKlass::cast(klass)->find_local_field_from_offset(offset, is_static, fd)) { 1339 return true; 1340 } 1341 klass = klass->super(); 1342 } 1343 return false; 1344 } 1345 1346 1347 void InstanceKlass::methods_do(void f(Method* method)) { 1348 // Methods aren't stable until they are loaded. This can be read outside 1349 // a lock through the ClassLoaderData for profiling 1350 if (!is_loaded()) { 1351 return; 1352 } 1353 1354 int len = methods()->length(); 1355 for (int index = 0; index < len; index++) { 1356 Method* m = methods()->at(index); 1357 assert(m->is_method(), "must be method"); 1358 f(m); 1359 } 1360 } 1361 1362 1363 void InstanceKlass::do_local_static_fields(FieldClosure* cl) { 1364 for (JavaFieldStream fs(this); !fs.done(); fs.next()) { 1365 if (fs.access_flags().is_static()) { 1366 fieldDescriptor& fd = fs.field_descriptor(); 1367 cl->do_field(&fd); 1368 } 1369 } 1370 } 1371 1372 1373 void InstanceKlass::do_local_static_fields(void f(fieldDescriptor*, Handle, TRAPS), Handle mirror, TRAPS) { 1374 instanceKlassHandle h_this(THREAD, this); 1375 do_local_static_fields_impl(h_this, f, mirror, CHECK); 1376 } 1377 1378 1379 void InstanceKlass::do_local_static_fields_impl(instanceKlassHandle this_k, 1380 void f(fieldDescriptor* fd, Handle, TRAPS), Handle mirror, TRAPS) { 1381 for (JavaFieldStream fs(this_k()); !fs.done(); fs.next()) { 1382 if (fs.access_flags().is_static()) { 1383 fieldDescriptor& fd = fs.field_descriptor(); 1384 f(&fd, mirror, CHECK); 1385 } 1386 } 1387 } 1388 1389 1390 static int compare_fields_by_offset(int* a, int* b) { 1391 return a[0] - b[0]; 1392 } 1393 1394 void InstanceKlass::do_nonstatic_fields(FieldClosure* cl) { 1395 InstanceKlass* super = superklass(); 1396 if (super != NULL) { 1397 super->do_nonstatic_fields(cl); 1398 } 1399 fieldDescriptor fd; 1400 int length = java_fields_count(); 1401 // In DebugInfo nonstatic fields are sorted by offset. 
1402 int* fields_sorted = NEW_C_HEAP_ARRAY(int, 2*(length+1), mtClass); 1403 int j = 0; 1404 for (int i = 0; i < length; i += 1) { 1405 fd.reinitialize(this, i); 1406 if (!fd.is_static()) { 1407 fields_sorted[j + 0] = fd.offset(); 1408 fields_sorted[j + 1] = i; 1409 j += 2; 1410 } 1411 } 1412 if (j > 0) { 1413 length = j; 1414 // _sort_Fn is defined in growableArray.hpp. 1415 qsort(fields_sorted, length/2, 2*sizeof(int), (_sort_Fn)compare_fields_by_offset); 1416 for (int i = 0; i < length; i += 2) { 1417 fd.reinitialize(this, fields_sorted[i + 1]); 1418 assert(!fd.is_static() && fd.offset() == fields_sorted[i], "only nonstatic fields"); 1419 cl->do_field(&fd); 1420 } 1421 } 1422 FREE_C_HEAP_ARRAY(int, fields_sorted); 1423 } 1424 1425 1426 void InstanceKlass::array_klasses_do(void f(Klass* k, TRAPS), TRAPS) { 1427 if (array_klasses() != NULL) 1428 ArrayKlass::cast(array_klasses())->array_klasses_do(f, THREAD); 1429 } 1430 1431 void InstanceKlass::array_klasses_do(void f(Klass* k)) { 1432 if (array_klasses() != NULL) 1433 ArrayKlass::cast(array_klasses())->array_klasses_do(f); 1434 } 1435 1436 #ifdef ASSERT 1437 static int linear_search(const Array<Method*>* methods, 1438 const Symbol* name, 1439 const Symbol* signature) { 1440 const int len = methods->length(); 1441 for (int index = 0; index < len; index++) { 1442 const Method* const m = methods->at(index); 1443 assert(m->is_method(), "must be method"); 1444 if (m->signature() == signature && m->name() == name) { 1445 return index; 1446 } 1447 } 1448 return -1; 1449 } 1450 #endif 1451 1452 static int binary_search(const Array<Method*>* methods, const Symbol* name) { 1453 int len = methods->length(); 1454 // methods are sorted, so do binary search 1455 int l = 0; 1456 int h = len - 1; 1457 while (l <= h) { 1458 int mid = (l + h) >> 1; 1459 Method* m = methods->at(mid); 1460 assert(m->is_method(), "must be method"); 1461 int res = m->name()->fast_compare(name); 1462 if (res == 0) { 1463 return mid; 1464 } else if (res 
< 0) { 1465 l = mid + 1; 1466 } else { 1467 h = mid - 1; 1468 } 1469 } 1470 return -1; 1471 } 1472 1473 // find_method looks up the name/signature in the local methods array 1474 Method* InstanceKlass::find_method(const Symbol* name, 1475 const Symbol* signature) const { 1476 return find_method_impl(name, signature, find_overpass, find_static, find_private); 1477 } 1478 1479 Method* InstanceKlass::find_method_impl(const Symbol* name, 1480 const Symbol* signature, 1481 OverpassLookupMode overpass_mode, 1482 StaticLookupMode static_mode, 1483 PrivateLookupMode private_mode) const { 1484 return InstanceKlass::find_method_impl(methods(), 1485 name, 1486 signature, 1487 overpass_mode, 1488 static_mode, 1489 private_mode); 1490 } 1491 1492 // find_instance_method looks up the name/signature in the local methods array 1493 // and skips over static methods 1494 Method* InstanceKlass::find_instance_method(const Array<Method*>* methods, 1495 const Symbol* name, 1496 const Symbol* signature) { 1497 Method* const meth = InstanceKlass::find_method_impl(methods, 1498 name, 1499 signature, 1500 find_overpass, 1501 skip_static, 1502 find_private); 1503 assert(((meth == NULL) || !meth->is_static()), 1504 "find_instance_method should have skipped statics"); 1505 return meth; 1506 } 1507 1508 // find_instance_method looks up the name/signature in the local methods array 1509 // and skips over static methods 1510 Method* InstanceKlass::find_instance_method(const Symbol* name, const Symbol* signature) const { 1511 return InstanceKlass::find_instance_method(methods(), name, signature); 1512 } 1513 1514 // Find looks up the name/signature in the local methods array 1515 // and filters on the overpass, static and private flags 1516 // This returns the first one found 1517 // note that the local methods array can have up to one overpass, one static 1518 // and one instance (private or not) with the same name/signature 1519 Method* InstanceKlass::find_local_method(const Symbol* name, 1520 
const Symbol* signature, 1521 OverpassLookupMode overpass_mode, 1522 StaticLookupMode static_mode, 1523 PrivateLookupMode private_mode) const { 1524 return InstanceKlass::find_method_impl(methods(), 1525 name, 1526 signature, 1527 overpass_mode, 1528 static_mode, 1529 private_mode); 1530 } 1531 1532 // Find looks up the name/signature in the local methods array 1533 // and filters on the overpass, static and private flags 1534 // This returns the first one found 1535 // note that the local methods array can have up to one overpass, one static 1536 // and one instance (private or not) with the same name/signature 1537 Method* InstanceKlass::find_local_method(const Array<Method*>* methods, 1538 const Symbol* name, 1539 const Symbol* signature, 1540 OverpassLookupMode overpass_mode, 1541 StaticLookupMode static_mode, 1542 PrivateLookupMode private_mode) { 1543 return InstanceKlass::find_method_impl(methods, 1544 name, 1545 signature, 1546 overpass_mode, 1547 static_mode, 1548 private_mode); 1549 } 1550 1551 Method* InstanceKlass::find_method(const Array<Method*>* methods, 1552 const Symbol* name, 1553 const Symbol* signature) { 1554 return InstanceKlass::find_method_impl(methods, 1555 name, 1556 signature, 1557 find_overpass, 1558 find_static, 1559 find_private); 1560 } 1561 1562 Method* InstanceKlass::find_method_impl(const Array<Method*>* methods, 1563 const Symbol* name, 1564 const Symbol* signature, 1565 OverpassLookupMode overpass_mode, 1566 StaticLookupMode static_mode, 1567 PrivateLookupMode private_mode) { 1568 int hit = find_method_index(methods, name, signature, overpass_mode, static_mode, private_mode); 1569 return hit >= 0 ? methods->at(hit): NULL; 1570 } 1571 1572 // true if method matches signature and conforms to skipping_X conditions. 
1573 static bool method_matches(const Method* m, 1574 const Symbol* signature, 1575 bool skipping_overpass, 1576 bool skipping_static, 1577 bool skipping_private) { 1578 return ((m->signature() == signature) && 1579 (!skipping_overpass || !m->is_overpass()) && 1580 (!skipping_static || !m->is_static()) && 1581 (!skipping_private || !m->is_private())); 1582 } 1583 1584 // Used directly for default_methods to find the index into the 1585 // default_vtable_indices, and indirectly by find_method 1586 // find_method_index looks in the local methods array to return the index 1587 // of the matching name/signature. If, overpass methods are being ignored, 1588 // the search continues to find a potential non-overpass match. This capability 1589 // is important during method resolution to prefer a static method, for example, 1590 // over an overpass method. 1591 // There is the possibility in any _method's array to have the same name/signature 1592 // for a static method, an overpass method and a local instance method 1593 // To correctly catch a given method, the search criteria may need 1594 // to explicitly skip the other two. For local instance methods, it 1595 // is often necessary to skip private methods 1596 int InstanceKlass::find_method_index(const Array<Method*>* methods, 1597 const Symbol* name, 1598 const Symbol* signature, 1599 OverpassLookupMode overpass_mode, 1600 StaticLookupMode static_mode, 1601 PrivateLookupMode private_mode) { 1602 const bool skipping_overpass = (overpass_mode == skip_overpass); 1603 const bool skipping_static = (static_mode == skip_static); 1604 const bool skipping_private = (private_mode == skip_private); 1605 const int hit = binary_search(methods, name); 1606 if (hit != -1) { 1607 const Method* const m = methods->at(hit); 1608 1609 // Do linear search to find matching signature. First, quick check 1610 // for common case, ignoring overpasses if requested. 
1611 if (method_matches(m, signature, skipping_overpass, skipping_static, skipping_private)) { 1612 return hit; 1613 } 1614 1615 // search downwards through overloaded methods 1616 int i; 1617 for (i = hit - 1; i >= 0; --i) { 1618 const Method* const m = methods->at(i); 1619 assert(m->is_method(), "must be method"); 1620 if (m->name() != name) { 1621 break; 1622 } 1623 if (method_matches(m, signature, skipping_overpass, skipping_static, skipping_private)) { 1624 return i; 1625 } 1626 } 1627 // search upwards 1628 for (i = hit + 1; i < methods->length(); ++i) { 1629 const Method* const m = methods->at(i); 1630 assert(m->is_method(), "must be method"); 1631 if (m->name() != name) { 1632 break; 1633 } 1634 if (method_matches(m, signature, skipping_overpass, skipping_static, skipping_private)) { 1635 return i; 1636 } 1637 } 1638 // not found 1639 #ifdef ASSERT 1640 const int index = (skipping_overpass || skipping_static || skipping_private) ? -1 : 1641 linear_search(methods, name, signature); 1642 assert(-1 == index, "binary search should have found entry %d", index); 1643 #endif 1644 } 1645 return -1; 1646 } 1647 1648 int InstanceKlass::find_method_by_name(const Symbol* name, int* end) const { 1649 return find_method_by_name(methods(), name, end); 1650 } 1651 1652 int InstanceKlass::find_method_by_name(const Array<Method*>* methods, 1653 const Symbol* name, 1654 int* end_ptr) { 1655 assert(end_ptr != NULL, "just checking"); 1656 int start = binary_search(methods, name); 1657 int end = start + 1; 1658 if (start != -1) { 1659 while (start - 1 >= 0 && (methods->at(start - 1))->name() == name) --start; 1660 while (end < methods->length() && (methods->at(end))->name() == name) ++end; 1661 *end_ptr = end; 1662 return start; 1663 } 1664 return -1; 1665 } 1666 1667 // uncached_lookup_method searches both the local class methods array and all 1668 // superclasses methods arrays, skipping any overpass methods in superclasses. 
// Look up a method by walking this class and its superclass chain.
// Static and private methods are acceptable matches; overpass methods are
// honored only for this class itself — once the walk moves to a superclass,
// the local mode is forced to skip_overpass.
Method* InstanceKlass::uncached_lookup_method(const Symbol* name,
                                              const Symbol* signature,
                                              OverpassLookupMode overpass_mode) const {
  OverpassLookupMode overpass_local_mode = overpass_mode;
  const Klass* klass = this;
  while (klass != NULL) {
    Method* const method = InstanceKlass::cast(klass)->find_method_impl(name,
                                                                        signature,
                                                                        overpass_local_mode,
                                                                        find_static,
                                                                        find_private);
    if (method != NULL) {
      return method;
    }
    klass = klass->super();
    overpass_local_mode = skip_overpass;   // Always ignore overpass methods in superclasses
  }
  return NULL;
}

#ifdef ASSERT
// search through class hierarchy and return true if this class or
// one of the superclasses was redefined
bool InstanceKlass::has_redefined_this_or_super() const {
  const Klass* klass = this;
  while (klass != NULL) {
    if (InstanceKlass::cast(klass)->has_been_redefined()) {
      return true;
    }
    klass = klass->super();
  }
  return false;
}
#endif

// lookup a method in the default methods list then in all transitive interfaces
// Do NOT return private or static methods
Method* InstanceKlass::lookup_method_in_ordered_interfaces(Symbol* name,
                                                           Symbol* signature) const {
  Method* m = NULL;
  if (default_methods() != NULL) {
    m = find_method(default_methods(), name, signature);
  }
  // Look up interfaces
  if (m == NULL) {
    m = lookup_method_in_all_interfaces(name, signature, find_defaults);
  }
  return m;
}

// lookup a method in all the interfaces that this class implements
// Do NOT return private or static methods, new in JDK8 which are not externally visible
// They should only be found in the initial InterfaceMethodRef
Method* InstanceKlass::lookup_method_in_all_interfaces(Symbol* name,
                                                       Symbol* signature,
                                                       DefaultsLookupMode defaults_mode) const {
  Array<Klass*>* all_ifs = transitive_interfaces();
  int num_ifs = all_ifs->length();
  InstanceKlass *ik = NULL;
  for (int i = 0; i < num_ifs; i++) {
    ik = InstanceKlass::cast(all_ifs->at(i));
    Method* m = ik->lookup_method(name, signature);
    // Only public, non-static matches count; default methods are skipped
    // when the caller asked for skip_defaults.
    if (m != NULL && m->is_public() && !m->is_static() &&
        ((defaults_mode != skip_defaults) || !m->is_default_method())) {
      return m;
    }
  }
  return NULL;
}

/* jni_id_for_impl for jfieldIds only */
// Slow path for jni_id_for(): takes JfieldIdCreation_lock, re-checks the
// list (another thread may have won the race), and allocates a new JNIid
// at the head of the list if the offset is still absent.
JNIid* InstanceKlass::jni_id_for_impl(instanceKlassHandle this_k, int offset) {
  MutexLocker ml(JfieldIdCreation_lock);
  // Retry lookup after we got the lock
  JNIid* probe = this_k->jni_ids() == NULL ? NULL : this_k->jni_ids()->find(offset);
  if (probe == NULL) {
    // Slow case, allocate new static field identifier
    probe = new JNIid(this_k(), offset, this_k->jni_ids());
    this_k->set_jni_ids(probe);
  }
  return probe;
}


/* jni_id_for for jfieldIds only */
// Lock-free fast path: search the existing JNIid list; fall back to the
// locked slow path only on a miss.
JNIid* InstanceKlass::jni_id_for(int offset) {
  JNIid* probe = jni_ids() == NULL ? NULL : jni_ids()->find(offset);
  if (probe == NULL) {
    probe = jni_id_for_impl(this, offset);
  }
  return probe;
}

// Read one u2 of the EnclosingMethod attribute, which is stored (when
// present) as the last enclosing_method_attribute_size entries of the
// _inner_classes array.  Returns 0 when the attribute is absent.
u2 InstanceKlass::enclosing_method_data(int offset) const {
  const Array<jushort>* const inner_class_list = inner_classes();
  if (inner_class_list == NULL) {
    return 0;
  }
  const int length = inner_class_list->length();
  // A length that is an exact multiple of inner_class_next_offset means the
  // array holds only inner-class 4-tuples and no enclosing-method suffix.
  if (length % inner_class_next_offset == 0) {
    return 0;
  }
  const int index = length - enclosing_method_attribute_size;
  assert(offset < enclosing_method_attribute_size, "invalid offset");
  return inner_class_list->at(index + offset);
}

// Store the EnclosingMethod class/method constant-pool indices into the
// suffix of the _inner_classes array (see enclosing_method_data above).
// Silently does nothing if the array has no enclosing-method suffix.
void InstanceKlass::set_enclosing_method_indices(u2 class_index,
                                                 u2 method_index) {
  Array<jushort>* inner_class_list = inner_classes();
  assert (inner_class_list != NULL, "_inner_classes list is not set up");
  int length = inner_class_list->length();
  if (length % inner_class_next_offset == enclosing_method_attribute_size) {
    int index = length - enclosing_method_attribute_size;
    inner_class_list->at_put(
      index + enclosing_method_class_index_offset, class_index);
    inner_class_list->at_put(
      index + enclosing_method_method_index_offset, method_index);
  }
}

// Lookup or create a jmethodID.
// This code is called by the VMThread and JavaThreads so the
// locking has to be done very carefully to avoid deadlocks
// and/or other cache consistency problems.
//
jmethodID InstanceKlass::get_jmethod_id(instanceKlassHandle ik_h, const methodHandle& method_h) {
  size_t idnum = (size_t)method_h->method_idnum();
  jmethodID* jmeths = ik_h->methods_jmethod_ids_acquire();
  size_t length = 0;
  jmethodID id = NULL;

  // We use a double-check locking idiom here because this cache is
  // performance sensitive. In the normal system, this cache only
  // transitions from NULL to non-NULL which is safe because we use
  // release_set_methods_jmethod_ids() to advertise the new cache.
  // A partially constructed cache should never be seen by a racing
  // thread. We also use release_store_ptr() to save a new jmethodID
  // in the cache so a partially constructed jmethodID should never be
  // seen either. Cache reads of existing jmethodIDs proceed without a
  // lock, but cache writes of a new jmethodID requires uniqueness and
  // creation of the cache itself requires no leaks so a lock is
  // generally acquired in those two cases.
  //
  // If the RedefineClasses() API has been used, then this cache can
  // grow and we'll have transitions from non-NULL to bigger non-NULL.
  // Cache creation requires no leaks and we require safety between all
  // cache accesses and freeing of the old cache so a lock is generally
  // acquired when the RedefineClasses() API has been used.

  if (jmeths != NULL) {
    // the cache already exists
    if (!ik_h->idnum_can_increment()) {
      // the cache can't grow so we can just get the current values
      get_jmethod_id_length_value(jmeths, idnum, &length, &id);
    } else {
      // cache can grow so we have to be more careful
      if (Threads::number_of_threads() == 0 ||
          SafepointSynchronize::is_at_safepoint()) {
        // we're single threaded or at a safepoint - no locking needed
        get_jmethod_id_length_value(jmeths, idnum, &length, &id);
      } else {
        MutexLocker ml(JmethodIdCreation_lock);
        get_jmethod_id_length_value(jmeths, idnum, &length, &id);
      }
    }
  }
  // implied else:
  // we need to allocate a cache so default length and id values are good

  if (jmeths == NULL ||   // no cache yet
      length <= idnum ||  // cache is too short
      id == NULL) {       // cache doesn't contain entry

    // This function can be called by the VMThread so we have to do all
    // things that might block on a safepoint before grabbing the lock.
    // Otherwise, we can deadlock with the VMThread or have a cache
    // consistency issue. These vars keep track of what we might have
    // to free after the lock is dropped.
    jmethodID to_dealloc_id = NULL;
    jmethodID* to_dealloc_jmeths = NULL;

    // may not allocate new_jmeths or use it if we allocate it
    jmethodID* new_jmeths = NULL;
    if (length <= idnum) {
      // allocate a new cache that might be used
      size_t size = MAX2(idnum+1, (size_t)ik_h->idnum_allocated_count());
      new_jmeths = NEW_C_HEAP_ARRAY(jmethodID, size+1, mtClass);
      memset(new_jmeths, 0, (size+1)*sizeof(jmethodID));
      // cache size is stored in element[0], other elements offset by one
      new_jmeths[0] = (jmethodID)size;
    }

    // allocate a new jmethodID that might be used
    jmethodID new_id = NULL;
    if (method_h->is_old() && !method_h->is_obsolete()) {
      // The method passed in is old (but not obsolete), we need to use the current version
      Method* current_method = ik_h->method_with_idnum((int)idnum);
      assert(current_method != NULL, "old and but not obsolete, so should exist");
      new_id = Method::make_jmethod_id(ik_h->class_loader_data(), current_method);
    } else {
      // It is the current version of the method or an obsolete method,
      // use the version passed in
      new_id = Method::make_jmethod_id(ik_h->class_loader_data(), method_h());
    }

    if (Threads::number_of_threads() == 0 ||
        SafepointSynchronize::is_at_safepoint()) {
      // we're single threaded or at a safepoint - no locking needed
      id = get_jmethod_id_fetch_or_update(ik_h, idnum, new_id, new_jmeths,
                                          &to_dealloc_id, &to_dealloc_jmeths);
    } else {
      MutexLocker ml(JmethodIdCreation_lock);
      id = get_jmethod_id_fetch_or_update(ik_h, idnum, new_id, new_jmeths,
                                          &to_dealloc_id, &to_dealloc_jmeths);
    }

    // The lock has been dropped so we can free resources.
    // Free up either the old cache or the new cache if we allocated one.
    if (to_dealloc_jmeths != NULL) {
      FreeHeap(to_dealloc_jmeths);
    }
    // free up the new ID since it wasn't needed
    if (to_dealloc_id != NULL) {
      Method::destroy_jmethod_id(ik_h->class_loader_data(), to_dealloc_id);
    }
  }
  return id;
}

// Figure out how many jmethodIDs haven't been allocated, and make
// sure space for them is pre-allocated. This makes getting all
// method ids much, much faster with classes with more than 8
// methods, and has a *substantial* effect on performance with jvmti
// code that loads all jmethodIDs for all classes.
void InstanceKlass::ensure_space_for_methodids(int start_offset) {
  int new_jmeths = 0;
  int length = methods()->length();
  for (int index = start_offset; index < length; index++) {
    Method* m = methods()->at(index);
    jmethodID id = m->find_jmethod_id_or_null();
    if (id == NULL) {
      new_jmeths++;
    }
  }
  if (new_jmeths != 0) {
    Method::ensure_jmethod_ids(class_loader_data(), new_jmeths);
  }
}

// Common code to fetch the jmethodID from the cache or update the
// cache with the new jmethodID. This function should never do anything
// that causes the caller to go to a safepoint or we can deadlock with
// the VMThread or have cache consistency issues.
// Fetch the jmethodID for idnum from the cache, or install new_id (and,
// if the cache was missing or too short, the pre-allocated cache
// new_jmeths).  Preconditions (see assert below): single-threaded, at a
// safepoint, or holding JmethodIdCreation_lock.  Anything that turned out
// not to be needed is handed back through *to_dealloc_id_p and
// *to_dealloc_jmeths_p so the caller can free it after dropping the lock.
//
jmethodID InstanceKlass::get_jmethod_id_fetch_or_update(
            instanceKlassHandle ik_h, size_t idnum, jmethodID new_id,
            jmethodID* new_jmeths, jmethodID* to_dealloc_id_p,
            jmethodID** to_dealloc_jmeths_p) {
  assert(new_id != NULL, "sanity check");
  assert(to_dealloc_id_p != NULL, "sanity check");
  assert(to_dealloc_jmeths_p != NULL, "sanity check");
  assert(Threads::number_of_threads() == 0 ||
         SafepointSynchronize::is_at_safepoint() ||
         JmethodIdCreation_lock->owned_by_self(), "sanity check");

  // reacquire the cache - we are locked, single threaded or at a safepoint
  jmethodID* jmeths = ik_h->methods_jmethod_ids_acquire();
  jmethodID id = NULL;
  size_t length = 0;

  if (jmeths == NULL ||                          // no cache yet
      (length = (size_t)jmeths[0]) <= idnum) {   // cache is too short
    if (jmeths != NULL) {
      // copy any existing entries from the old cache
      for (size_t index = 0; index < length; index++) {
        new_jmeths[index+1] = jmeths[index+1];
      }
      *to_dealloc_jmeths_p = jmeths;   // save old cache for later delete
    }
    // Publish the (possibly copied-into) new cache with release semantics so
    // racing lock-free readers never see a partially constructed array.
    ik_h->release_set_methods_jmethod_ids(jmeths = new_jmeths);
  } else {
    // fetch jmethodID (if any) from the existing cache
    id = jmeths[idnum+1];
    *to_dealloc_jmeths_p = new_jmeths;   // save new cache for later delete
  }
  if (id == NULL) {
    // No matching jmethodID in the existing cache or we have a new
    // cache or we just grew the cache. This cache write is done here
    // by the first thread to win the foot race because a jmethodID
    // needs to be unique once it is generally available.
    id = new_id;

    // The jmethodID cache can be read while unlocked so we have to
    // make sure the new jmethodID is complete before installing it
    // in the cache.
    OrderAccess::release_store_ptr(&jmeths[idnum+1], id);
  } else {
    *to_dealloc_id_p = new_id;   // save new id for later delete
  }
  return id;
}


// Common code to get the jmethodID cache length and the jmethodID
// value at index idnum if there is one.
//
void InstanceKlass::get_jmethod_id_length_value(jmethodID* cache,
       size_t idnum, size_t *length_p, jmethodID* id_p) {
  assert(cache != NULL, "sanity check");
  assert(length_p != NULL, "sanity check");
  assert(id_p != NULL, "sanity check");

  // cache size is stored in element[0], other elements offset by one
  *length_p = (size_t)cache[0];
  if (*length_p <= idnum) {   // cache is too short
    *id_p = NULL;
  } else {
    *id_p = cache[idnum+1];   // fetch jmethodID (if any)
  }
}


// Lookup a jmethodID, NULL if not found.  Do no blocking, no allocations, no handles
jmethodID InstanceKlass::jmethod_id_or_null(Method* method) {
  size_t idnum = (size_t)method->method_idnum();
  jmethodID* jmeths = methods_jmethod_ids_acquire();
  size_t length;   // length assigned as debugging crumb
  jmethodID id = NULL;
  if (jmeths != NULL &&                          // If there is a cache
      (length = (size_t)jmeths[0]) > idnum) {    // and if it is long enough,
    id = jmeths[idnum+1];                        // Look up the id (may be NULL)
  }
  return id;
}

// Wrap the raw _dep_context field in a DependencyContext accessor object.
inline DependencyContext InstanceKlass::dependencies() {
  DependencyContext dep_context(&_dep_context);
  return dep_context;
}

// Delegate nmethod-dependency bookkeeping to the dependency context.
int InstanceKlass::mark_dependent_nmethods(KlassDepChange& changes) {
  return dependencies().mark_dependent_nmethods(changes);
}

void InstanceKlass::add_dependent_nmethod(nmethod* nm) {
  dependencies().add_dependent_nmethod(nm);
}

void InstanceKlass::remove_dependent_nmethod(nmethod* nm, bool delete_immediately) {
  dependencies().remove_dependent_nmethod(nm, delete_immediately);
}
2021 2022 #ifndef PRODUCT 2023 void InstanceKlass::print_dependent_nmethods(bool verbose) { 2024 dependencies().print_dependent_nmethods(verbose); 2025 } 2026 2027 bool InstanceKlass::is_dependent_nmethod(nmethod* nm) { 2028 return dependencies().is_dependent_nmethod(nm); 2029 } 2030 #endif //PRODUCT 2031 2032 void InstanceKlass::clean_weak_instanceklass_links(BoolObjectClosure* is_alive) { 2033 clean_implementors_list(is_alive); 2034 clean_method_data(is_alive); 2035 2036 // Since GC iterates InstanceKlasses sequentially, it is safe to remove stale entries here. 2037 DependencyContext dep_context(&_dep_context); 2038 dep_context.expunge_stale_entries(); 2039 } 2040 2041 void InstanceKlass::clean_implementors_list(BoolObjectClosure* is_alive) { 2042 assert(class_loader_data()->is_alive(is_alive), "this klass should be live"); 2043 if (is_interface()) { 2044 if (ClassUnloading) { 2045 Klass* impl = implementor(); 2046 if (impl != NULL) { 2047 if (!impl->is_loader_alive(is_alive)) { 2048 // remove this guy 2049 Klass** klass = adr_implementor(); 2050 assert(klass != NULL, "null klass"); 2051 if (klass != NULL) { 2052 *klass = NULL; 2053 } 2054 } 2055 } 2056 } 2057 } 2058 } 2059 2060 void InstanceKlass::clean_method_data(BoolObjectClosure* is_alive) { 2061 for (int m = 0; m < methods()->length(); m++) { 2062 MethodData* mdo = methods()->at(m)->method_data(); 2063 if (mdo != NULL) { 2064 mdo->clean_method_data(is_alive); 2065 } 2066 } 2067 } 2068 2069 2070 static void remove_unshareable_in_class(Klass* k) { 2071 // remove klass's unshareable info 2072 k->remove_unshareable_info(); 2073 } 2074 2075 void InstanceKlass::remove_unshareable_info() { 2076 Klass::remove_unshareable_info(); 2077 // Unlink the class 2078 if (is_linked()) { 2079 unlink_class(); 2080 } 2081 init_implementor(); 2082 2083 constants()->remove_unshareable_info(); 2084 2085 assert(_dep_context == DependencyContext::EMPTY, "dependency context is not shareable"); 2086 2087 for (int i = 0; i < 
methods()->length(); i++) { 2088 Method* m = methods()->at(i); 2089 m->remove_unshareable_info(); 2090 } 2091 2092 // do array classes also. 2093 array_klasses_do(remove_unshareable_in_class); 2094 } 2095 2096 static void restore_unshareable_in_class(Klass* k, TRAPS) { 2097 // Array classes have null protection domain. 2098 // --> see ArrayKlass::complete_create_array_klass() 2099 k->restore_unshareable_info(ClassLoaderData::the_null_class_loader_data(), Handle(), CHECK); 2100 } 2101 2102 void InstanceKlass::restore_unshareable_info(ClassLoaderData* loader_data, Handle protection_domain, TRAPS) { 2103 Klass::restore_unshareable_info(loader_data, protection_domain, CHECK); 2104 instanceKlassHandle ik(THREAD, this); 2105 2106 Array<Method*>* methods = ik->methods(); 2107 int num_methods = methods->length(); 2108 for (int index2 = 0; index2 < num_methods; ++index2) { 2109 methodHandle m(THREAD, methods->at(index2)); 2110 m->restore_unshareable_info(CHECK); 2111 } 2112 if (JvmtiExport::has_redefined_a_class()) { 2113 // Reinitialize vtable because RedefineClasses may have changed some 2114 // entries in this vtable for super classes so the CDS vtable might 2115 // point to old or obsolete entries. RedefineClasses doesn't fix up 2116 // vtables in the shared system dictionary, only the main one. 2117 // It also redefines the itable too so fix that too. 2118 ResourceMark rm(THREAD); 2119 ik->vtable()->initialize_vtable(false, CHECK); 2120 ik->itable()->initialize_itable(false, CHECK); 2121 } 2122 2123 // restore constant pool resolved references 2124 ik->constants()->restore_unshareable_info(CHECK); 2125 2126 ik->array_klasses_do(restore_unshareable_in_class, CHECK); 2127 } 2128 2129 // returns true IFF is_in_error_state() has been changed as a result of this call. 
2130 bool InstanceKlass::check_sharing_error_state() { 2131 assert(DumpSharedSpaces, "should only be called during dumping"); 2132 bool old_state = is_in_error_state(); 2133 2134 if (!is_in_error_state()) { 2135 bool bad = false; 2136 for (InstanceKlass* sup = java_super(); sup; sup = sup->java_super()) { 2137 if (sup->is_in_error_state()) { 2138 bad = true; 2139 break; 2140 } 2141 } 2142 if (!bad) { 2143 Array<Klass*>* interfaces = transitive_interfaces(); 2144 for (int i = 0; i < interfaces->length(); i++) { 2145 Klass* iface = interfaces->at(i); 2146 if (InstanceKlass::cast(iface)->is_in_error_state()) { 2147 bad = true; 2148 break; 2149 } 2150 } 2151 } 2152 2153 if (bad) { 2154 set_in_error_state(); 2155 } 2156 } 2157 2158 return (old_state != is_in_error_state()); 2159 } 2160 2161 static void clear_all_breakpoints(Method* m) { 2162 m->clear_all_breakpoints(); 2163 } 2164 2165 2166 void InstanceKlass::notify_unload_class(InstanceKlass* ik) { 2167 // notify the debugger 2168 if (JvmtiExport::should_post_class_unload()) { 2169 JvmtiExport::post_class_unload(ik); 2170 } 2171 2172 // notify ClassLoadingService of class unload 2173 ClassLoadingService::notify_class_unloaded(ik); 2174 } 2175 2176 void InstanceKlass::release_C_heap_structures(InstanceKlass* ik) { 2177 // Clean up C heap 2178 ik->release_C_heap_structures(); 2179 ik->constants()->release_C_heap_structures(); 2180 } 2181 2182 void InstanceKlass::release_C_heap_structures() { 2183 2184 // Can't release the constant pool here because the constant pool can be 2185 // deallocated separately from the InstanceKlass for default methods and 2186 // redefine classes. 
2187 2188 // Deallocate oop map cache 2189 if (_oop_map_cache != NULL) { 2190 delete _oop_map_cache; 2191 _oop_map_cache = NULL; 2192 } 2193 2194 // Deallocate JNI identifiers for jfieldIDs 2195 JNIid::deallocate(jni_ids()); 2196 set_jni_ids(NULL); 2197 2198 jmethodID* jmeths = methods_jmethod_ids_acquire(); 2199 if (jmeths != (jmethodID*)NULL) { 2200 release_set_methods_jmethod_ids(NULL); 2201 FreeHeap(jmeths); 2202 } 2203 2204 // Deallocate MemberNameTable 2205 { 2206 Mutex* lock_or_null = SafepointSynchronize::is_at_safepoint() ? NULL : MemberNameTable_lock; 2207 MutexLockerEx ml(lock_or_null, Mutex::_no_safepoint_check_flag); 2208 MemberNameTable* mnt = member_names(); 2209 if (mnt != NULL) { 2210 delete mnt; 2211 set_member_names(NULL); 2212 } 2213 } 2214 2215 // Release dependencies. 2216 // It is desirable to use DC::remove_all_dependents() here, but, unfortunately, 2217 // it is not safe (see JDK-8143408). The problem is that the klass dependency 2218 // context can contain live dependencies, since there's a race between nmethod & 2219 // klass unloading. If the klass is dead when nmethod unloading happens, relevant 2220 // dependencies aren't removed from the context associated with the class (see 2221 // nmethod::flush_dependencies). It ends up during klass unloading as seemingly 2222 // live dependencies pointing to unloaded nmethods and causes a crash in 2223 // DC::remove_all_dependents() when it touches unloaded nmethod. 2224 dependencies().wipe(); 2225 2226 // Deallocate breakpoint records 2227 if (breakpoints() != 0x0) { 2228 methods_do(clear_all_breakpoints); 2229 assert(breakpoints() == 0x0, "should have cleared breakpoints"); 2230 } 2231 2232 // deallocate the cached class file 2233 if (_cached_class_file != NULL) { 2234 os::free(_cached_class_file); 2235 _cached_class_file = NULL; 2236 } 2237 2238 // Decrement symbol reference counts associated with the unloaded class. 
2239 if (_name != NULL) _name->decrement_refcount(); 2240 // unreference array name derived from this class name (arrays of an unloaded 2241 // class can't be referenced anymore). 2242 if (_array_name != NULL) _array_name->decrement_refcount(); 2243 if (_source_debug_extension != NULL) FREE_C_HEAP_ARRAY(char, _source_debug_extension); 2244 2245 assert(_total_instanceKlass_count >= 1, "Sanity check"); 2246 Atomic::dec(&_total_instanceKlass_count); 2247 } 2248 2249 void InstanceKlass::set_source_debug_extension(const char* array, int length) { 2250 if (array == NULL) { 2251 _source_debug_extension = NULL; 2252 } else { 2253 // Adding one to the attribute length in order to store a null terminator 2254 // character could cause an overflow because the attribute length is 2255 // already coded with an u4 in the classfile, but in practice, it's 2256 // unlikely to happen. 2257 assert((length+1) > length, "Overflow checking"); 2258 char* sde = NEW_C_HEAP_ARRAY(char, (length + 1), mtClass); 2259 for (int i = 0; i < length; i++) { 2260 sde[i] = array[i]; 2261 } 2262 sde[length] = '\0'; 2263 _source_debug_extension = sde; 2264 } 2265 } 2266 2267 address InstanceKlass::static_field_addr(int offset) { 2268 return (address)(offset + InstanceMirrorKlass::offset_of_static_fields() + cast_from_oop<intptr_t>(java_mirror())); 2269 } 2270 2271 2272 const char* InstanceKlass::signature_name() const { 2273 int hash_len = 0; 2274 char hash_buf[40]; 2275 2276 // If this is an anonymous class, append a hash to make the name unique 2277 if (is_anonymous()) { 2278 intptr_t hash = (java_mirror() != NULL) ? 
java_mirror()->identity_hash() : 0; 2279 jio_snprintf(hash_buf, sizeof(hash_buf), "/" UINTX_FORMAT, (uintx)hash); 2280 hash_len = (int)strlen(hash_buf); 2281 } 2282 2283 // Get the internal name as a c string 2284 const char* src = (const char*) (name()->as_C_string()); 2285 const int src_length = (int)strlen(src); 2286 2287 char* dest = NEW_RESOURCE_ARRAY(char, src_length + hash_len + 3); 2288 2289 // Add L as type indicator 2290 int dest_index = 0; 2291 dest[dest_index++] = is_value_type_klass() ? 'Q' : 'L'; 2292 2293 // Add the actual class name 2294 for (int src_index = 0; src_index < src_length; ) { 2295 dest[dest_index++] = src[src_index++]; 2296 } 2297 2298 // If we have a hash, append it 2299 for (int hash_index = 0; hash_index < hash_len; ) { 2300 dest[dest_index++] = hash_buf[hash_index++]; 2301 } 2302 2303 // Add the semicolon and the NULL 2304 dest[dest_index++] = ';'; 2305 dest[dest_index] = '\0'; 2306 return dest; 2307 } 2308 2309 // different verisons of is_same_class_package 2310 bool InstanceKlass::is_same_class_package(const Klass* class2) const { 2311 const Klass* const class1 = (const Klass* const)this; 2312 oop classloader1 = InstanceKlass::cast(class1)->class_loader(); 2313 const Symbol* const classname1 = class1->name(); 2314 2315 if (class2->is_objArray_klass()) { 2316 class2 = ObjArrayKlass::cast(class2)->bottom_klass(); 2317 } 2318 oop classloader2; 2319 if (class2->is_instance_klass()) { 2320 classloader2 = InstanceKlass::cast(class2)->class_loader(); 2321 } else { 2322 assert(class2->is_typeArray_klass(), "should be type array"); 2323 classloader2 = NULL; 2324 } 2325 const Symbol* classname2 = class2->name(); 2326 2327 return InstanceKlass::is_same_class_package(classloader1, classname1, 2328 classloader2, classname2); 2329 } 2330 2331 bool InstanceKlass::is_same_class_package(oop other_class_loader, 2332 const Symbol* other_class_name) const { 2333 oop this_class_loader = class_loader(); 2334 const Symbol* const this_class_name = 
name(); 2335 2336 return InstanceKlass::is_same_class_package(this_class_loader, 2337 this_class_name, 2338 other_class_loader, 2339 other_class_name); 2340 } 2341 2342 // return true if two classes are in the same package, classloader 2343 // and classname information is enough to determine a class's package 2344 bool InstanceKlass::is_same_class_package(oop class_loader1, const Symbol* class_name1, 2345 oop class_loader2, const Symbol* class_name2) { 2346 if (class_loader1 != class_loader2) { 2347 return false; 2348 } else if (class_name1 == class_name2) { 2349 return true; // skip painful bytewise comparison 2350 } else { 2351 ResourceMark rm; 2352 2353 // The Symbol*'s are in UTF8 encoding. Since we only need to check explicitly 2354 // for ASCII characters ('/', 'L', '['), we can keep them in UTF8 encoding. 2355 // Otherwise, we just compare jbyte values between the strings. 2356 const jbyte *name1 = class_name1->base(); 2357 const jbyte *name2 = class_name2->base(); 2358 2359 const jbyte *last_slash1 = UTF8::strrchr(name1, class_name1->utf8_length(), '/'); 2360 const jbyte *last_slash2 = UTF8::strrchr(name2, class_name2->utf8_length(), '/'); 2361 2362 if ((last_slash1 == NULL) || (last_slash2 == NULL)) { 2363 // One of the two doesn't have a package. Only return true 2364 // if the other one also doesn't have a package. 2365 return last_slash1 == last_slash2; 2366 } else { 2367 // Skip over '['s 2368 if (*name1 == '[') { 2369 do { 2370 name1++; 2371 } while (*name1 == '['); 2372 if (*name1 != 'L' || *name1 != 'Q') { 2373 // Something is terribly wrong. Shouldn't be here. 2374 return false; 2375 } 2376 } 2377 if (*name2 == '[') { 2378 do { 2379 name2++; 2380 } while (*name2 == '['); 2381 if (*name2 != 'L' || *name2 != 'Q') { 2382 // Something is terribly wrong. Shouldn't be here. 
2383 return false; 2384 } 2385 } 2386 2387 // Check that package part is identical 2388 int length1 = last_slash1 - name1; 2389 int length2 = last_slash2 - name2; 2390 2391 return UTF8::equal(name1, length1, name2, length2); 2392 } 2393 } 2394 } 2395 2396 // Returns true iff super_method can be overridden by a method in targetclassname 2397 // See JSL 3rd edition 8.4.6.1 2398 // Assumes name-signature match 2399 // "this" is InstanceKlass of super_method which must exist 2400 // note that the InstanceKlass of the method in the targetclassname has not always been created yet 2401 bool InstanceKlass::is_override(const methodHandle& super_method, Handle targetclassloader, Symbol* targetclassname, TRAPS) { 2402 // Private methods can not be overridden 2403 if (super_method->is_private()) { 2404 return false; 2405 } 2406 // If super method is accessible, then override 2407 if ((super_method->is_protected()) || 2408 (super_method->is_public())) { 2409 return true; 2410 } 2411 // Package-private methods are not inherited outside of package 2412 assert(super_method->is_package_private(), "must be package private"); 2413 return(is_same_class_package(targetclassloader(), targetclassname)); 2414 } 2415 2416 /* defined for now in jvm.cpp, for historical reasons *-- 2417 Klass* InstanceKlass::compute_enclosing_class_impl(instanceKlassHandle self, 2418 Symbol*& simple_name_result, TRAPS) { 2419 ... 2420 } 2421 */ 2422 2423 // tell if two classes have the same enclosing class (at package level) 2424 bool InstanceKlass::is_same_package_member_impl(const InstanceKlass* class1, 2425 const Klass* class2, 2426 TRAPS) { 2427 if (class2 == class1) return true; 2428 if (!class2->is_instance_klass()) return false; 2429 2430 // must be in same package before we try anything else 2431 if (!class1->is_same_class_package(class2->class_loader(), class2->name())) 2432 return false; 2433 2434 // As long as there is an outer1.getEnclosingClass, 2435 // shift the search outward. 
2436 const InstanceKlass* outer1 = class1; 2437 for (;;) { 2438 // As we walk along, look for equalities between outer1 and class2. 2439 // Eventually, the walks will terminate as outer1 stops 2440 // at the top-level class around the original class. 2441 bool ignore_inner_is_member; 2442 const Klass* next = outer1->compute_enclosing_class(&ignore_inner_is_member, 2443 CHECK_false); 2444 if (next == NULL) break; 2445 if (next == class2) return true; 2446 outer1 = InstanceKlass::cast(next); 2447 } 2448 2449 // Now do the same for class2. 2450 const InstanceKlass* outer2 = InstanceKlass::cast(class2); 2451 for (;;) { 2452 bool ignore_inner_is_member; 2453 Klass* next = outer2->compute_enclosing_class(&ignore_inner_is_member, 2454 CHECK_false); 2455 if (next == NULL) break; 2456 // Might as well check the new outer against all available values. 2457 if (next == class1) return true; 2458 if (next == outer1) return true; 2459 outer2 = InstanceKlass::cast(next); 2460 } 2461 2462 // If by this point we have not found an equality between the 2463 // two classes, we know they are in separate package members. 2464 return false; 2465 } 2466 2467 bool InstanceKlass::find_inner_classes_attr(instanceKlassHandle k, int* ooff, int* noff, TRAPS) { 2468 constantPoolHandle i_cp(THREAD, k->constants()); 2469 for (InnerClassesIterator iter(k); !iter.done(); iter.next()) { 2470 int ioff = iter.inner_class_info_index(); 2471 if (ioff != 0) { 2472 // Check to see if the name matches the class we're looking for 2473 // before attempting to find the class. 
2474 if (i_cp->klass_name_at_matches(k, ioff)) { 2475 Klass* inner_klass = i_cp->klass_at(ioff, CHECK_false); 2476 if (k() == inner_klass) { 2477 *ooff = iter.outer_class_info_index(); 2478 *noff = iter.inner_name_index(); 2479 return true; 2480 } 2481 } 2482 } 2483 } 2484 return false; 2485 } 2486 2487 InstanceKlass* InstanceKlass::compute_enclosing_class_impl(const InstanceKlass* k, 2488 bool* inner_is_member, 2489 TRAPS) { 2490 InstanceKlass* outer_klass = NULL; 2491 *inner_is_member = false; 2492 int ooff = 0, noff = 0; 2493 if (find_inner_classes_attr(k, &ooff, &noff, THREAD)) { 2494 constantPoolHandle i_cp(THREAD, k->constants()); 2495 if (ooff != 0) { 2496 Klass* ok = i_cp->klass_at(ooff, CHECK_NULL); 2497 outer_klass = InstanceKlass::cast(ok); 2498 *inner_is_member = true; 2499 } 2500 if (NULL == outer_klass) { 2501 // It may be anonymous; try for that. 2502 int encl_method_class_idx = k->enclosing_method_class_index(); 2503 if (encl_method_class_idx != 0) { 2504 Klass* ok = i_cp->klass_at(encl_method_class_idx, CHECK_NULL); 2505 outer_klass = InstanceKlass::cast(ok); 2506 *inner_is_member = false; 2507 } 2508 } 2509 } 2510 2511 // If no inner class attribute found for this class. 2512 if (NULL == outer_klass) return NULL; 2513 2514 // Throws an exception if outer klass has not declared k as an inner klass 2515 // We need evidence that each klass knows about the other, or else 2516 // the system could allow a spoof of an inner class to gain access rights. 2517 Reflection::check_for_inner_class(outer_klass, k, *inner_is_member, CHECK_NULL); 2518 return outer_klass; 2519 } 2520 2521 jint InstanceKlass::compute_modifier_flags(TRAPS) const { 2522 jint access = access_flags().as_int(); 2523 2524 // But check if it happens to be member class. 2525 instanceKlassHandle ik(THREAD, this); 2526 InnerClassesIterator iter(ik); 2527 for (; !iter.done(); iter.next()) { 2528 int ioff = iter.inner_class_info_index(); 2529 // Inner class attribute can be zero, skip it. 
2530 // Strange but true: JVM spec. allows null inner class refs. 2531 if (ioff == 0) continue; 2532 2533 // only look at classes that are already loaded 2534 // since we are looking for the flags for our self. 2535 Symbol* inner_name = ik->constants()->klass_name_at(ioff); 2536 if ((ik->name() == inner_name)) { 2537 // This is really a member class. 2538 access = iter.inner_access_flags(); 2539 break; 2540 } 2541 } 2542 // Remember to strip ACC_SUPER bit 2543 return (access & (~JVM_ACC_SUPER)) & JVM_ACC_WRITTEN_FLAGS; 2544 } 2545 2546 jint InstanceKlass::jvmti_class_status() const { 2547 jint result = 0; 2548 2549 if (is_linked()) { 2550 result |= JVMTI_CLASS_STATUS_VERIFIED | JVMTI_CLASS_STATUS_PREPARED; 2551 } 2552 2553 if (is_initialized()) { 2554 assert(is_linked(), "Class status is not consistent"); 2555 result |= JVMTI_CLASS_STATUS_INITIALIZED; 2556 } 2557 if (is_in_error_state()) { 2558 result |= JVMTI_CLASS_STATUS_ERROR; 2559 } 2560 return result; 2561 } 2562 2563 Method* InstanceKlass::method_at_itable(Klass* holder, int index, TRAPS) { 2564 itableOffsetEntry* ioe = (itableOffsetEntry*)start_of_itable(); 2565 int method_table_offset_in_words = ioe->offset()/wordSize; 2566 int nof_interfaces = (method_table_offset_in_words - itable_offset_in_words()) 2567 / itableOffsetEntry::size(); 2568 2569 for (int cnt = 0 ; ; cnt ++, ioe ++) { 2570 // If the interface isn't implemented by the receiver class, 2571 // the VM should throw IncompatibleClassChangeError. 
// (Continuation of a method that begins above this excerpt; it appears to
// walk the itable offset entries looking for 'holder' and resolve the
// method at 'index' — confirm against the full function head.)
    if (cnt >= nof_interfaces) {
      // Ran off the end of the itable without finding the interface.
      THROW_NULL(vmSymbols::java_lang_IncompatibleClassChangeError());
    }

    Klass* ik = ioe->interface_klass();
    if (ik == holder) break;
  }

  itableMethodEntry* ime = ioe->first_method_entry(this);
  Method* m = ime[index].method();
  if (m == NULL) {
    // Slot was never filled in: no implementation for this itable entry.
    THROW_NULL(vmSymbols::java_lang_AbstractMethodError());
  }
  return m;
}


#if INCLUDE_JVMTI
// update default_methods for redefineclasses for methods that are
// not yet in the vtable due to concurrent subclass define and superinterface
// redefinition
// Note: those in the vtable, should have been updated via adjust_method_entries
void InstanceKlass::adjust_default_methods(InstanceKlass* holder, bool* trace_name_printed) {
  // search the default_methods for uses of either obsolete or EMCP methods
  if (default_methods() != NULL) {
    for (int index = 0; index < default_methods()->length(); index ++) {
      Method* old_method = default_methods()->at(index);
      if (old_method == NULL || old_method->method_holder() != holder || !old_method->is_old()) {
        continue; // skip uninteresting entries
      }
      assert(!old_method->is_deleted(), "default methods may not be deleted");

      // The replacement is the method with the same original idnum in the
      // redefined holder.
      Method* new_method = holder->method_with_idnum(old_method->orig_method_idnum());

      assert(new_method != NULL, "method_with_idnum() should not be NULL");
      assert(old_method != new_method, "sanity check");

      default_methods()->at_put(index, new_method);
      if (RC_TRACE_IN_RANGE(0x00100000, 0x00400000)) {
        if (!(*trace_name_printed)) {
          // RC_TRACE_MESG macro has an embedded ResourceMark
          RC_TRACE_MESG(("adjust: klassname=%s default methods from name=%s",
                         external_name(),
                         old_method->method_holder()->external_name()));
          // Print the klass header only once per adjustment pass.
          *trace_name_printed = true;
        }
        RC_TRACE(0x00100000, ("default method update: %s(%s) ",
                              new_method->name()->as_C_string(),
                              new_method->signature()->as_C_string()));
      }
    }
  }
}
#endif // INCLUDE_JVMTI

// On-stack replacement stuff

// Links a freshly compiled OSR nmethod onto this klass' OSR list; under
// tiered compilation also raises the method's highest-OSR-level watermark
// and invalidates lower-level OSR methods for the same bci.
void InstanceKlass::add_osr_nmethod(nmethod* n) {
  // only one compilation can be active
  {
    // This is a short non-blocking critical region, so the no safepoint check is ok.
    MutexLockerEx ml(OsrList_lock, Mutex::_no_safepoint_check_flag);
    assert(n->is_osr_method(), "wrong kind of nmethod");
    // Push onto the head of the singly-linked OSR list.
    n->set_osr_link(osr_nmethods_head());
    set_osr_nmethods_head(n);
    // Raise the highest osr level if necessary
    if (TieredCompilation) {
      Method* m = n->method();
      m->set_highest_osr_comp_level(MAX2(m->highest_osr_comp_level(), n->comp_level()));
    }
  }

  // Get rid of the osr methods for the same bci that have lower levels.
  // (Done outside the lock; lookup_osr_nmethod re-takes it per level.)
  if (TieredCompilation) {
    for (int l = CompLevel_limited_profile; l < n->comp_level(); l++) {
      nmethod *inv = lookup_osr_nmethod(n->method(), n->osr_entry_bci(), l, true);
      if (inv != NULL && inv->is_in_use()) {
        inv->make_not_entrant();
      }
    }
  }
}


// Unlinks 'n' from this klass' OSR list; under tiered compilation also
// recomputes the method's highest remaining OSR compilation level from the
// entries before and after n's former position.
void InstanceKlass::remove_osr_nmethod(nmethod* n) {
  // This is a short non-blocking critical region, so the no safepoint check is ok.
  MutexLockerEx ml(OsrList_lock, Mutex::_no_safepoint_check_flag);
  assert(n->is_osr_method(), "wrong kind of nmethod");
  nmethod* last = NULL;
  nmethod* cur = osr_nmethods_head();
  int max_level = CompLevel_none;  // Find the max comp level excluding n
  Method* m = n->method();
  // Search for match
  while(cur != NULL && cur != n) {
    if (TieredCompilation && m == cur->method()) {
      // Find max level before n
      max_level = MAX2(max_level, cur->comp_level());
    }
    last = cur;
    cur = cur->osr_link();
  }
  nmethod* next = NULL;
  if (cur == n) {
    next = cur->osr_link();
    if (last == NULL) {
      // Remove first element
      set_osr_nmethods_head(next);
    } else {
      last->set_osr_link(next);
    }
  }
  // Cleared unconditionally, even if n was not found on the list.
  n->set_osr_link(NULL);
  if (TieredCompilation) {
    cur = next;
    while (cur != NULL) {
      // Find max level after n
      if (m == cur->method()) {
        max_level = MAX2(max_level, cur->comp_level());
      }
      cur = cur->osr_link();
    }
    m->set_highest_osr_comp_level(max_level);
  }
}

// Marks every OSR nmethod for method 'm' on this klass' list for
// deoptimization; returns the number of nmethods marked.
int InstanceKlass::mark_osr_nmethods(const Method* m) {
  // This is a short non-blocking critical region, so the no safepoint check is ok.
  MutexLockerEx ml(OsrList_lock, Mutex::_no_safepoint_check_flag);
  nmethod* osr = osr_nmethods_head();
  int found = 0;
  while (osr != NULL) {
    assert(osr->is_osr_method(), "wrong kind of nmethod found in chain");
    if (osr->method() == m) {
      osr->mark_for_deoptimization();
      found++;
    }
    osr = osr->osr_link();
  }
  return found;
}

// Finds an OSR nmethod for (m, bci). With match_level, only an exact
// comp_level match is returned; otherwise the highest-level candidate whose
// level is at least comp_level (returning immediately on highest_tier).
nmethod* InstanceKlass::lookup_osr_nmethod(const Method* m, int bci, int comp_level, bool match_level) const {
  // This is a short non-blocking critical region, so the no safepoint check is ok.
  MutexLockerEx ml(OsrList_lock, Mutex::_no_safepoint_check_flag);
  nmethod* osr = osr_nmethods_head();
  nmethod* best = NULL;
  while (osr != NULL) {
    assert(osr->is_osr_method(), "wrong kind of nmethod found in chain");
    // There can be a time when a c1 osr method exists but we are waiting
    // for a c2 version. When c2 completes its osr nmethod we will trash
    // the c1 version and only be able to find the c2 version. However
    // while we overflow in the c1 code at back branches we don't want to
    // try and switch to the same code as we are already running

    if (osr->method() == m &&
        (bci == InvocationEntryBci || osr->osr_entry_bci() == bci)) {
      if (match_level) {
        if (osr->comp_level() == comp_level) {
          // Found a match - return it.
          return osr;
        }
      } else {
        if (best == NULL || (osr->comp_level() > best->comp_level())) {
          if (osr->comp_level() == CompLevel_highest_tier) {
            // Found the best possible - return it.
            return osr;
          }
          best = osr;
        }
      }
    }
    osr = osr->osr_link();
  }
  // Only accept 'best' if it reaches the requested level.
  if (best != NULL && best->comp_level() >= comp_level && match_level == false) {
    return best;
  }
  return NULL;
}

// Records a resolved MemberName in this klass' MemberNameTable (so it can be
// adjusted on class redefinition). Returns false, caching nothing, if the
// target method has already been made obsolete.
bool InstanceKlass::add_member_name(Handle mem_name) {
  // Weak global handle is created BEFORE taking the lock: make_weak_global
  // can safepoint, which is forbidden once the NoSafepointVerifier is live.
  jweak mem_name_wref = JNIHandles::make_weak_global(mem_name);
  MutexLocker ml(MemberNameTable_lock);
  DEBUG_ONLY(NoSafepointVerifier nsv);

  // Check if method has been redefined while taking out MemberNameTable_lock, if so
  // return false.  We cannot cache obsolete methods. They will crash when the function
  // is called!
  Method* method = (Method*)java_lang_invoke_MemberName::vmtarget(mem_name());
  if (method->is_obsolete()) {
    // NOTE(review): mem_name_wref created above is not released on this
    // early return — looks like a weak-global-handle leak; confirm whether
    // JNIHandles::destroy_weak_global should be called here.
    return false;
  } else if (method->is_old()) {
    // Replace method with redefined version
    java_lang_invoke_MemberName::set_vmtarget(mem_name(), method_with_idnum(method->method_idnum()));
  }

  if (_member_names == NULL) {
    // Lazily allocate the table on first use.
    _member_names = new (ResourceObj::C_HEAP, mtClass) MemberNameTable(idnum_allocated_count());
  }
  _member_names->add_member_name(mem_name_wref);
  return true;
}

// -----------------------------------------------------------------------------------------------------
// Printing

#ifndef PRODUCT

#define BULLET " - "

// Human-readable names for InstanceKlass::_init_state values (same order as
// the ClassState enum; indexed directly by _init_state in print_on).
static const char* state_names[] = {
  "allocated", "loaded", "linked", "being_initialized", "fully_initialized", "initialization_error"
};

// Dumps 'len' vtable/itable slots starting at 'start'. When 'self' is given,
// small positive entries are interpreted as self-relative offsets and
// cross-referenced against the slot they point at.
static void print_vtable(address self, intptr_t* start, int len, outputStream* st) {
  ResourceMark rm;
  int* forward_refs = NEW_RESOURCE_ARRAY(int, len);
  for (int i = 0; i < len; i++) forward_refs[i] = 0;
  for (int i = 0; i < len; i++) {
    intptr_t e = start[i];
    st->print("%d : " INTPTR_FORMAT, i, e);
    if (forward_refs[i] != 0) {
      // An earlier slot pointed here; annotate the back-reference.
      int from = forward_refs[i];
      int off = (int) start[from];
      st->print(" (offset %d <= [%d])", off, from);
    }
    if (e != 0 && ((Metadata*)e)->is_metaspace_object()) {
      st->print(" ");
      ((Metadata*)e)->print_value_on(st);
    } else if (self != NULL && e > 0 && e < 0x10000) {
      // Small value: treat as an offset from 'self'; remember the target
      // slot so it can be annotated when the loop reaches it.
      address location = self + e;
      int index = (int)((intptr_t*)location - start);
      st->print(" (offset %d => [%d])", (int)e, index);
      if (index >= 0 && index < len)
        forward_refs[index] = i;
    }
    st->cr();
  }
}

// Convenience overload for vtables (no self-relative offset decoding).
static void print_vtable(vtableEntry* start, int len, outputStream* st) {
  return print_vtable(NULL, reinterpret_cast<intptr_t*>(start), len, st);
}

// Prints an Array<T*>: value summary always, one element per line when
// Verbose or WizardMode is set.
template<typename T>
static void print_array_on(outputStream* st, Array<T>* array) {
  if (array == NULL) { st->print_cr("NULL"); return; }
  array->print_value_on(st); st->cr();
  if (Verbose || WizardMode) {
    for (int i = 0; i < array->length(); i++) {
      st->print("%d : ", i); array->at(i)->print_value_on(st); st->cr();
    }
  }
}

// Overload for Array<int>: ints have no print_value_on, so print directly.
static void print_array_on(outputStream* st, Array<int>* array) {
  if (array == NULL) { st->print_cr("NULL"); return; }
  array->print_value_on(st); st->cr();
  if (Verbose || WizardMode) {
    for (int i = 0; i < array->length(); i++) {
      st->print("%d : %d", i, array->at(i)); st->cr();
    }
  }
}

// Full diagnostic dump of this klass: sizes, flags, state, hierarchy,
// method/interface arrays, annotations, vtable/itable and field layouts.
void InstanceKlass::print_on(outputStream* st) const {
  assert(is_klass(), "must be klass");
  Klass::print_on(st);

  st->print(BULLET"instance size: %d", size_helper()); st->cr();
  st->print(BULLET"klass size: %d", size()); st->cr();
  st->print(BULLET"access: "); access_flags().print_on(st); st->cr();
  st->print(BULLET"misc flags: 0x%x", _misc_flags); st->cr();
  st->print(BULLET"state: "); st->print_cr("%s", state_names[_init_state]);
  st->print(BULLET"name: "); name()->print_value_on(st); st->cr();
  st->print(BULLET"super: "); super()->print_value_on_maybe_null(st); st->cr();
  st->print(BULLET"sub: ");
  Klass* sub = subklass();
  int n;
  for (n = 0; sub != NULL; n++, sub = sub->next_sibling()) {
    if (n < MaxSubklassPrintSize) {
      sub->print_value_on(st);
      st->print(" ");
    }
  }
  // If the subclass list was truncated, note how many were omitted.
  if (n >= MaxSubklassPrintSize) st->print("(" INTX_FORMAT " more klasses...)", n - MaxSubklassPrintSize);
  st->cr();

  if (is_interface()) {
    st->print_cr(BULLET"nof implementors: %d", nof_implementors());
    // implementor() is only meaningful when there is exactly one.
    if (nof_implementors() == 1) {
      st->print_cr(BULLET"implementor: ");
      st->print(" ");
      implementor()->print_value_on(st);
      st->cr();
    }
  }

  st->print(BULLET"arrays: "); array_klasses()->print_value_on_maybe_null(st); st->cr();
  st->print(BULLET"methods: "); print_array_on(st, methods());
  st->print(BULLET"method ordering: "); print_array_on(st, method_ordering());
  st->print(BULLET"default_methods: "); print_array_on(st, default_methods());
  if (default_vtable_indices() != NULL) {
    st->print(BULLET"default vtable indices: "); print_array_on(st, default_vtable_indices());
  }
  st->print(BULLET"local interfaces: "); print_array_on(st, local_interfaces());
  st->print(BULLET"trans. interfaces: "); print_array_on(st, transitive_interfaces());
  st->print(BULLET"constants: "); constants()->print_value_on(st); st->cr();
  if (class_loader_data() != NULL) {
    st->print(BULLET"class loader data: ");
    class_loader_data()->print_value_on(st);
    st->cr();
  }
  st->print(BULLET"host class: "); host_klass()->print_value_on_maybe_null(st); st->cr();
  if (source_file_name() != NULL) {
    st->print(BULLET"source file: ");
    source_file_name()->print_value_on(st);
    st->cr();
  }
  if (source_debug_extension() != NULL) {
    st->print(BULLET"source debug extension: ");
    st->print("%s", source_debug_extension());
    st->cr();
  }
  st->print(BULLET"class annotations: "); class_annotations()->print_value_on(st); st->cr();
  st->print(BULLET"class type annotations: "); class_type_annotations()->print_value_on(st); st->cr();
  st->print(BULLET"field annotations: "); fields_annotations()->print_value_on(st); st->cr();
  st->print(BULLET"field type annotations: "); fields_type_annotations()->print_value_on(st); st->cr();
  {
    bool have_pv = false;
    // previous versions are linked together through the InstanceKlass
    for (InstanceKlass* pv_node = _previous_versions;
         pv_node != NULL;
         pv_node = pv_node->previous_versions()) {
      if (!have_pv)
        st->print(BULLET"previous version: ");
      have_pv = true;
      pv_node->constants()->print_value_on(st);
    }
    if (have_pv) st->cr();
  }

  if (generic_signature() != NULL) {
    st->print(BULLET"generic signature: ");
    generic_signature()->print_value_on(st);
    st->cr();
  }
  st->print(BULLET"inner classes: "); inner_classes()->print_value_on(st); st->cr();
  st->print(BULLET"java mirror: "); java_mirror()->print_value_on(st); st->cr();
  st->print(BULLET"vtable length %d (start addr: " INTPTR_FORMAT ")", vtable_length(), p2i(start_of_vtable())); st->cr();
  if (vtable_length() > 0 && (Verbose || WizardMode)) print_vtable(start_of_vtable(), vtable_length(), st);
  st->print(BULLET"itable length %d (start addr: " INTPTR_FORMAT ")", itable_length(), p2i(start_of_itable())); st->cr();
  if (itable_length() > 0 && (Verbose || WizardMode)) print_vtable(NULL, start_of_itable(), itable_length(), st);
  st->print_cr(BULLET"---- static fields (%d words):", static_field_size());
  FieldPrinter print_static_field(st);
  // print_on is const; the field-walk entry points are not, hence the casts.
  ((InstanceKlass*)this)->do_local_static_fields(&print_static_field);
  st->print_cr(BULLET"---- non-static fields (%d words):", nonstatic_field_size());
  FieldPrinter print_nonstatic_field(st);
  InstanceKlass* ik = const_cast<InstanceKlass*>(this);
  ik->do_nonstatic_fields(&print_nonstatic_field);

  st->print(BULLET"non-static oop maps: ");
  OopMapBlock* map = start_of_nonstatic_oop_maps();
  OopMapBlock* end_map = map + nonstatic_oop_map_count();
  while (map < end_map) {
    // Print each oop-map block as an inclusive first-last offset range.
    st->print("%d-%d ", map->offset(), map->offset() + heapOopSize*(map->count() - 1));
    map++;
  }
  st->cr();
}

#endif //PRODUCT

// One-line value print: access flags (verbose modes only) plus class name.
void InstanceKlass::print_value_on(outputStream* st) const {
  assert(is_klass(), "must be klass");
  if (Verbose || WizardMode) access_flags().print_on(st);
  name()->print_value_on(st);
}

#ifndef PRODUCT

// Prints one field: with no object (_obj == NULL, i.e. static-field walk)
// only the descriptor is printed, otherwise the value stored in _obj.
void FieldPrinter::do_field(fieldDescriptor* fd) {
  _st->print(BULLET);
  if (_obj == NULL) {
    fd->print_on(_st);
    _st->cr();
  } else {
    fd->print_on_for(_st, _obj);
  }
}


// Dumps an instance of this klass; well-known classes (String, Class,
// MethodType) get special-cased output in addition to the raw fields.
void InstanceKlass::oop_print_on(oop obj, outputStream* st) {
  Klass::oop_print_on(obj, st);

  if (this == SystemDictionary::String_klass()) {
    typeArrayOop value = java_lang_String::value(obj);
    juint length = java_lang_String::length(obj);
    // Guard against partially-constructed or corrupt String instances.
    if (value != NULL &&
        value->is_typeArray() &&
        length <= (juint) value->length()) {
      st->print(BULLET"string: ");
      java_lang_String::print(obj, st);
      st->cr();
      if (!WizardMode) return; // that is enough
    }
  }

  st->print_cr(BULLET"---- fields (total size %d words):", oop_size(obj));
  FieldPrinter print_field(st, obj);
  do_nonstatic_fields(&print_field);

  if (this == SystemDictionary::Class_klass()) {
    st->print(BULLET"signature: ");
    java_lang_Class::print_signature(obj, st);
    st->cr();
    Klass* mirrored_klass = java_lang_Class::as_Klass(obj);
    st->print(BULLET"fake entry for mirror: ");
    mirrored_klass->print_value_on_maybe_null(st);
    st->cr();
    Klass* array_klass = java_lang_Class::array_klass(obj);
    st->print(BULLET"fake entry for array: ");
    array_klass->print_value_on_maybe_null(st);
    st->cr();
    st->print_cr(BULLET"fake entry for oop_size: %d", java_lang_Class::oop_size(obj));
    st->print_cr(BULLET"fake entry for static_oop_field_count: %d", java_lang_Class::static_oop_field_count(obj));
    Klass* real_klass = java_lang_Class::as_Klass(obj);
    // A mirror for an instance klass also carries that klass' static fields.
    if (real_klass != NULL && real_klass->is_instance_klass()) {
      InstanceKlass::cast(real_klass)->do_local_static_fields(&print_field);
    }
  } else if (this == SystemDictionary::MethodType_klass()) {
    st->print(BULLET"signature: ");
    java_lang_invoke_MethodType::print_signature(obj, st);
    st->cr();
  }
}

#endif //PRODUCT

// One-line value print for an instance, with extra detail for well-known
// classes (String contents, mirrored Klass, MethodType signature, boxed
// primitives, LambdaForm vmentry, MemberName target).
void InstanceKlass::oop_print_value_on(oop obj, outputStream* st) {
  st->print("a ");
  name()->print_value_on(st);
  obj->print_address_on(st);
  if (this == SystemDictionary::String_klass()
      && java_lang_String::value(obj) != NULL) {
    ResourceMark rm;
    int len = java_lang_String::length(obj);
    // Elide long strings to a 12-char prefix.
    int plen = (len < 24 ? len : 12);
    char* str = java_lang_String::as_utf8_string(obj, 0, plen);
    st->print(" = \"%s\"", str);
    if (len > plen)
      st->print("...[%d]", len);
  } else if (this == SystemDictionary::Class_klass()) {
    Klass* k = java_lang_Class::as_Klass(obj);
    st->print(" = ");
    if (k != NULL) {
      k->print_value_on(st);
    } else {
      // Primitive-type mirror: no Klass, print the type name instead.
      const char* tname = type2name(java_lang_Class::primitive_type(obj));
      st->print("%s", tname ? tname : "type?");
    }
  } else if (this == SystemDictionary::MethodType_klass()) {
    st->print(" = ");
    java_lang_invoke_MethodType::print_signature(obj, st);
  } else if (java_lang_boxing_object::is_instance(obj)) {
    st->print(" = ");
    java_lang_boxing_object::print(obj, st);
  } else if (this == SystemDictionary::LambdaForm_klass()) {
    oop vmentry = java_lang_invoke_LambdaForm::vmentry(obj);
    if (vmentry != NULL) {
      st->print(" => ");
      vmentry->print_value_on(st);
    }
  } else if (this == SystemDictionary::MemberName_klass()) {
    Metadata* vmtarget = java_lang_invoke_MemberName::vmtarget(obj);
    if (vmtarget != NULL) {
      st->print(" = ");
      vmtarget->print_value_on(st);
    } else {
      // Unresolved MemberName: print clazz.name instead of the target.
      java_lang_invoke_MemberName::clazz(obj)->print_value_on(st);
      st->print(".");
      java_lang_invoke_MemberName::name(obj)->print_value_on(st);
    }
  }
}

const char* InstanceKlass::internal_name() const {
  return external_name();
}

// Logs class-loading info: at Info level, name and source; at Debug level,
// additionally hierarchy pointers, interfaces, loader and a CRC32 checksum
// of the classfile bytes.
void InstanceKlass::print_loading_log(LogLevel::type type,
                                      ClassLoaderData* loader_data,
                                      const ClassFileStream* cfs) const {
  ResourceMark rm;
  outputStream* log;

  assert(type == LogLevel::Info || type == LogLevel::Debug, "sanity");

  if (type == LogLevel::Info) {
    log = LogHandle(classload)::info_stream();
  } else {
    assert(type == LogLevel::Debug,
           "print_loading_log supports only Debug and Info levels");
    log = LogHandle(classload)::debug_stream();
  }

  // Name and class hierarchy info
  log->print("%s", external_name());

  // Source
  if (cfs != NULL) {
    if (cfs->source() != NULL) {
      log->print(" source: %s", cfs->source());
    } else if (loader_data == ClassLoaderData::the_null_class_loader_data()) {
      // Boot loader with no stream source: attribute to the defining caller.
      Thread* THREAD = Thread::current();
      Klass* caller =
            THREAD->is_Java_thread()
                ? ((JavaThread*)THREAD)->security_get_caller_class(1)
                : NULL;
      // caller can be NULL, for example, during a JVMTI VM_Init hook
      if (caller != NULL) {
        log->print(" source: instance of %s", caller->external_name());
      } else {
        // source is unknown
      }
    } else {
      Handle class_loader(loader_data->class_loader());
      log->print(" source: %s", class_loader->klass()->external_name());
    }
  } else {
    // No stream at all: the class came from the shared (CDS) archive.
    log->print(" source: shared objects file");
  }

  if (type == LogLevel::Debug) {
    // Class hierarchy info
    log->print(" klass: " INTPTR_FORMAT " super: " INTPTR_FORMAT,
               p2i(this), p2i(superklass()));

    if (local_interfaces() != NULL && local_interfaces()->length() > 0) {
      log->print(" interfaces:");
      int length = local_interfaces()->length();
      for (int i = 0; i < length; i++) {
        log->print(" " INTPTR_FORMAT,
                   p2i(InstanceKlass::cast(local_interfaces()->at(i))));
      }
    }

    // Class loader
    log->print(" loader: [");
    loader_data->print_value_on(log);
    log->print("]");

    // Classfile checksum
    if (cfs) {
      log->print(" bytes: %d checksum: %08x",
                 cfs->length(),
                 ClassLoader::crc32(0, (const char*)cfs->buffer(),
                                    cfs->length()));
    }
  }
  log->cr();
}

#if INCLUDE_SERVICES
// Size Statistics
// Accumulates this klass' memory footprint (instance size, vtable, itable,
// oop maps, metadata arrays, constant pool, annotations, methods) into 'sz'.
void InstanceKlass::collect_statistics(KlassSizeStats *sz) const {
  Klass::collect_statistics(sz);

  sz->_inst_size = wordSize * size_helper();
  sz->_vtab_bytes = wordSize * vtable_length();
  sz->_itab_bytes = wordSize * itable_length();
  sz->_nonstatic_oopmap_bytes = wordSize * nonstatic_oop_map_size();

  // Sum the read-only metadata arrays while recording each individually.
  int n = 0;
  n += (sz->_methods_array_bytes = sz->count_array(methods()));
  n += (sz->_method_ordering_bytes = sz->count_array(method_ordering()));
  n += (sz->_local_interfaces_bytes = sz->count_array(local_interfaces()));
  n += (sz->_transitive_interfaces_bytes = sz->count_array(transitive_interfaces()));
  n += (sz->_fields_bytes = sz->count_array(fields()));
  n += (sz->_inner_classes_bytes = sz->count_array(inner_classes()));
  sz->_ro_bytes += n;

  const ConstantPool* cp = constants();
  if (cp) {
    cp->collect_statistics(sz);
  }

  const Annotations* anno = annotations();
  if (anno) {
    anno->collect_statistics(sz);
  }

  const Array<Method*>* methods_array = methods();
  if (methods()) {
    for (int i = 0; i < methods_array->length(); i++) {
      Method* method = methods_array->at(i);
      if (method) {
        sz->_method_count ++;
        method->collect_statistics(sz);
      }
    }
  }
}
#endif // INCLUDE_SERVICES

// Verification

// Closure that guarantees every oop field it visits is a valid oop or NULL;
// used by InstanceKlass::oop_verify_on below.
class VerifyFieldClosure: public OopClosure {
 protected:
  template <class T> void do_oop_work(T* p) {
    oop obj = oopDesc::load_decode_heap_oop(p);
    if (!obj->is_oop_or_null()) {
      tty->print_cr("Failed: " PTR_FORMAT " -> " PTR_FORMAT, p2i(p), p2i(obj));
      Universe::print_on(tty);
      guarantee(false, "boom");
    }
  }
 public:
  virtual void do_oop(oop* p)       { VerifyFieldClosure::do_oop_work(p); }
  virtual void do_oop(narrowOop* p) { VerifyFieldClosure::do_oop_work(p); }
};

// Checks this klass' internal invariants: loader-data membership, vtable,
// subclass/sibling links, implementor, interface arrays, method sorting,
// method ordering, JNI ids and other cached pointers.
void InstanceKlass::verify_on(outputStream* st) {
#ifndef PRODUCT
  // Avoid redundant verifies, this really should be in product.
  if (_verify_count == Universe::verify_count()) return;
  _verify_count = Universe::verify_count();
#endif

  // Verify Klass
  Klass::verify_on(st);

  // Verify that klass is present in ClassLoaderData
  guarantee(class_loader_data()->contains_klass(this),
            "this class isn't found in class loader data");

  // Verify vtables
  if (is_linked()) {
    ResourceMark rm;
    // $$$ This used to be done only for m/s collections. Doing it
    // always seemed a valid generalization. (DLD -- 6/00)
    vtable()->verify(st);
  }

  // Verify first subklass
  if (subklass() != NULL) {
    guarantee(subklass()->is_klass(), "should be klass");
  }

  // Verify siblings
  Klass* super = this->super();
  Klass* sib = next_sibling();
  if (sib != NULL) {
    if (sib == this) {
      fatal("subclass points to itself " PTR_FORMAT, p2i(sib));
    }

    guarantee(sib->is_klass(), "should be klass");
    guarantee(sib->super() == super, "siblings should have same superklass");
  }

  // Verify implementor fields
  Klass* im = implementor();
  if (im != NULL) {
    guarantee(is_interface(), "only interfaces should have implementor set");
    guarantee(im->is_klass(), "should be klass");
    guarantee(!im->is_interface() || im == this,
              "implementors cannot be interfaces");
  }

  // Verify local interfaces
  if (local_interfaces()) {
    Array<Klass*>* local_interfaces = this->local_interfaces();
    for (int j = 0; j < local_interfaces->length(); j++) {
      Klass* e = local_interfaces->at(j);
      guarantee(e->is_klass() && e->is_interface(), "invalid local interface");
    }
  }

  // Verify transitive interfaces
  if (transitive_interfaces() != NULL) {
    Array<Klass*>* transitive_interfaces = this->transitive_interfaces();
    for (int j = 0; j < transitive_interfaces->length(); j++) {
      Klass* e = transitive_interfaces->at(j);
      guarantee(e->is_klass() && e->is_interface(), "invalid transitive interface");
    }
  }

  // Verify methods
  if (methods() != NULL) {
    Array<Method*>* methods = this->methods();
    for (int j = 0; j < methods->length(); j++) {
      guarantee(methods->at(j)->is_method(), "non-method in methods array");
    }
    // Adjacent-pair check: methods must be sorted by name.
    for (int j = 0; j < methods->length() - 1; j++) {
      Method* m1 = methods->at(j);
      Method* m2 = methods->at(j + 1);
      guarantee(m1->name()->fast_compare(m2->name()) <= 0, "methods not sorted correctly");
    }
  }

  // Verify method ordering
  if (method_ordering() != NULL) {
    Array<int>* method_ordering = this->method_ordering();
    int length = method_ordering->length();
    if (JvmtiExport::can_maintain_original_method_order() ||
        ((UseSharedSpaces || DumpSharedSpaces) && length != 0)) {
      guarantee(length == methods()->length(), "invalid method ordering length");
      jlong sum = 0;
      for (int j = 0; j < length; j++) {
        int original_index = method_ordering->at(j);
        guarantee(original_index >= 0, "invalid method ordering index");
        guarantee(original_index < length, "invalid method ordering index");
        sum += original_index;
      }
      // Verify sum of indices 0,1,...,length-1
      guarantee(sum == ((jlong)length*(length-1))/2, "invalid method ordering sum");
    } else {
      guarantee(length == 0, "invalid method ordering length");
    }
  }

  // Verify default methods
  if (default_methods() != NULL) {
    Array<Method*>* methods = this->default_methods();
    for (int j = 0; j < methods->length(); j++) {
      guarantee(methods->at(j)->is_method(), "non-method in methods array");
    }
    for (int j = 0; j < methods->length() - 1; j++) {
      Method* m1 = methods->at(j);
      Method* m2 = methods->at(j + 1);
      guarantee(m1->name()->fast_compare(m2->name()) <= 0, "methods not sorted correctly");
    }
  }

  // Verify JNI static field identifiers
  if (jni_ids() != NULL) {
    jni_ids()->verify(this);
  }

  // Verify other fields
  if (array_klasses() != NULL) {
    guarantee(array_klasses()->is_klass(), "should be klass");
  }
  if (constants() != NULL) {
    guarantee(constants()->is_constantPool(), "should be constant pool");
  }
  const Klass* host = host_klass();
  if (host != NULL) {
    guarantee(host->is_klass(), "should be klass");
  }
}

// Verifies an instance of this klass by checking every oop field in it.
void InstanceKlass::oop_verify_on(oop obj, outputStream* st) {
  Klass::oop_verify_on(obj, st);
  VerifyFieldClosure blk;
  obj->oop_iterate_no_header(&blk);
}


// JNIid class for jfieldIDs only
// Note to reviewers:
// These JNI functions are just moved over to column 1 and not changed
// in the compressed oops workspace.
// Builds a jfieldID node for (holder, offset), linked in front of 'next'.
JNIid::JNIid(Klass* holder, int offset, JNIid* next) {
  _holder = holder;
  _offset = offset;
  _next = next;
  debug_only(_is_static_field_id = false;)
}


// Walks the linked list starting at this node for an id with 'offset';
// returns NULL if none matches.
JNIid* JNIid::find(int offset) {
  JNIid* current = this;
  while (current != NULL) {
    if (current->offset() == offset) return current;
    current = current->next();
  }
  return NULL;
}

// Deletes the entire list headed by 'current'.
void JNIid::deallocate(JNIid* current) {
  while (current != NULL) {
    JNIid* next = current->next();
    delete current;
    current = next;
  }
}


// Checks every id in the list belongs to 'holder'; in debug builds also
// range-checks static-field ids against the mirror's static field block.
void JNIid::verify(Klass* holder) {
  int first_field_offset = InstanceMirrorKlass::offset_of_static_fields();
  int end_field_offset;
  end_field_offset = first_field_offset + (InstanceKlass::cast(holder)->static_field_size() * wordSize);

  JNIid* current = this;
  while (current != NULL) {
    guarantee(current->holder() == holder, "Invalid klass in JNIid");
#ifdef ASSERT
    int o = current->offset();
    if (current->is_static_field_id()) {
      guarantee(o >= first_field_offset && o < end_field_offset, "Invalid static field offset in JNIid");
    }
#endif
    current = current->next();
  }
}


#ifdef ASSERT
// Enforces that _init_state only moves forward through the ClassState
// sequence (shared classes may re-enter the same state); transitioning to
// 'allocated' is always permitted.
void InstanceKlass::set_init_state(ClassState state) {
  bool good_state = is_shared() ? (_init_state <= state)
                                : (_init_state < state);
  assert(good_state || state == allocated, "illegal state transition");
  _init_state = (u1)state;
}
#endif



// RedefineClasses() support for previous versions:
int InstanceKlass::_previous_version_count = 0;

// Purge previous versions before adding new previous versions of the class.
void InstanceKlass::purge_previous_versions(InstanceKlass* ik) {
  if (ik->previous_versions() != NULL) {
    // This klass has previous versions so see what we can cleanup
    // while it is safe to do so.

    int deleted_count = 0; // leave debugging breadcrumbs
    int live_count = 0;
    ClassLoaderData* loader_data = ik->class_loader_data();
    assert(loader_data != NULL, "should never be null");

    // RC_TRACE macro has an embedded ResourceMark
    RC_TRACE(0x00000200, ("purge: %s: previous versions", ik->external_name()));

    // previous versions are linked together through the InstanceKlass
    InstanceKlass* pv_node = ik->previous_versions();
    InstanceKlass* last = ik;
    int version = 0;

    // check the previous versions list
    for (; pv_node != NULL; ) {

      ConstantPool* pvcp = pv_node->constants();
      assert(pvcp != NULL, "cp ref was unexpectedly cleared");

      if (!pvcp->on_stack()) {
        // If the constant pool isn't on stack, none of the methods
        // are executing.  Unlink this previous_version.
        // The previous version InstanceKlass is on the ClassLoaderData deallocate list
        // so will be deallocated during the next phase of class unloading.
        RC_TRACE(0x00000200, ("purge: previous version " INTPTR_FORMAT " is dead",
                              p2i(pv_node)));
        // For debugging purposes.
        pv_node->set_is_scratch_class();
        pv_node->class_loader_data()->add_to_deallocate_list(pv_node);
        // Splice pv_node out of the chain: 'last' (the most recent live
        // node, or ik itself) now links past it.
        pv_node = pv_node->previous_versions();
        last->link_previous_versions(pv_node);
        deleted_count++;
        version++;
        continue;
      } else {
        RC_TRACE(0x00000200, ("purge: previous version " INTPTR_FORMAT " is alive",
                              p2i(pv_node)));
        assert(pvcp->pool_holder() != NULL, "Constant pool with no holder");
        guarantee (!loader_data->is_unloading(), "unloaded classes can't be on the stack");
        live_count++;
      }

      // At least one method is live in this previous version.
      // Reset dead EMCP methods not to get breakpoints.
      // All methods are deallocated when all of the methods for this class are no
      // longer running.
      Array<Method*>* method_refs = pv_node->methods();
      if (method_refs != NULL) {
        RC_TRACE(0x00000200, ("purge: previous methods length=%d",
                              method_refs->length()));
        for (int j = 0; j < method_refs->length(); j++) {
          Method* method = method_refs->at(j);

          if (!method->on_stack()) {
            // no breakpoints for non-running methods
            if (method->is_running_emcp()) {
              method->set_running_emcp(false);
            }
          } else {
            assert (method->is_obsolete() || method->is_running_emcp(),
                    "emcp method cannot run after emcp bit is cleared");
            // RC_TRACE macro has an embedded ResourceMark
            RC_TRACE(0x00000200,
                     ("purge: %s(%s): prev method @%d in version @%d is alive",
                      method->name()->as_C_string(),
                      method->signature()->as_C_string(), j, version));
          }
        }
      }
      // next previous version
      last = pv_node;
      pv_node = pv_node->previous_versions();
      version++;
    }
    RC_TRACE(0x00000200,
             ("purge: previous version stats: live=%d, deleted=%d", live_count,
              deleted_count));
  }
}

// Marks, in older previous-version klasses, the EMCP methods whose every
// newer EMCP version was just made obsolete by the current RedefineClasses()
// call. Only needed when old_methods mixes EMCP and obsolete methods and
// previous versions exist.
void InstanceKlass::mark_newly_obsolete_methods(Array<Method*>* old_methods,
                                                int emcp_method_count) {
  int obsolete_method_count = old_methods->length() - emcp_method_count;

  if (emcp_method_count != 0 && obsolete_method_count != 0 &&
      _previous_versions != NULL) {
    // We have a mix of obsolete and EMCP methods so we have to
    // clear out any matching EMCP method entries the hard way.
    int local_count = 0;
    for (int i = 0; i < old_methods->length(); i++) {
      Method* old_method = old_methods->at(i);
      if (old_method->is_obsolete()) {
        // only obsolete methods are interesting
        Symbol* m_name = old_method->name();
        Symbol* m_signature = old_method->signature();

        // previous versions are linked together through the InstanceKlass
        int j = 0;
        for (InstanceKlass* prev_version = _previous_versions;
             prev_version != NULL;
             prev_version = prev_version->previous_versions(), j++) {

          Array<Method*>* method_refs = prev_version->methods();
          for (int k = 0; k < method_refs->length(); k++) {
            Method* method = method_refs->at(k);

            if (!method->is_obsolete() &&
                method->name() == m_name &&
                method->signature() == m_signature) {
              // The current RedefineClasses() call has made all EMCP
              // versions of this method obsolete so mark it as obsolete
              RC_TRACE(0x00000400,
                       ("add: %s(%s): flush obsolete method @%d in version @%d",
                        m_name->as_C_string(), m_signature->as_C_string(), k, j));

              method->set_is_obsolete();
              break;
            }
          }

          // The previous loop may not find a matching EMCP method, but
          // that doesn't mean that we can optimize and not go any
          // further back in the PreviousVersion generations. The EMCP
          // method for this generation could have already been made obsolete,
          // but there still may be an older EMCP method that has not
          // been made obsolete.
        }

        if (++local_count >= obsolete_method_count) {
          // no more obsolete methods so bail out now
          break;
        }
      }
    }
  }
}

// Save the scratch_class as the previous version if any of the methods are running.
// The previous_versions are used to set breakpoints in EMCP methods and they are
// also used to clean MethodData links to redefined methods that are no longer running.
// Record scratch_class as a previous version of this klass if any of its
// methods are still running; otherwise schedule it for deallocation.
//   scratch_class     - the class being replaced by this redefinition
//   emcp_method_count - number of EMCP (equivalent-modulo-constant-pool)
//                       methods in scratch_class
// Must run in the VMThread (RedefineClasses executes at a safepoint).
void InstanceKlass::add_previous_version(instanceKlassHandle scratch_class,
                                         int emcp_method_count) {
  assert(Thread::current()->is_VM_thread(),
         "only VMThread can add previous versions");

  // RC_TRACE macro has an embedded ResourceMark
  RC_TRACE(0x00000400, ("adding previous version ref for %s, EMCP_cnt=%d",
    scratch_class->external_name(), emcp_method_count));

  // Clean out old previous versions
  purge_previous_versions(this);

  // Mark newly obsolete methods in remaining previous versions. An EMCP method from
  // a previous redefinition may be made obsolete by this redefinition.
  Array<Method*>* old_methods = scratch_class->methods();
  mark_newly_obsolete_methods(old_methods, emcp_method_count);

  // If the constant pool for this previous version of the class
  // is not marked as being on the stack, then none of the methods
  // in this previous version of the class are on the stack so
  // we don't need to add this as a previous version.
  ConstantPool* cp_ref = scratch_class->constants();
  if (!cp_ref->on_stack()) {
    RC_TRACE(0x00000400, ("add: scratch class not added; no methods are running"));
    // For debugging purposes.
    scratch_class->set_is_scratch_class();
    scratch_class->class_loader_data()->add_to_deallocate_list(scratch_class());
    // Update count for class unloading.
    // NOTE(review): the count is decremented here even though this scratch
    // class was never linked into the previous-version chain — verify this
    // balances an increment made elsewhere, otherwise it may underflow.
    _previous_version_count--;
    return;
  }

  if (emcp_method_count != 0) {
    // At least one method is still running, check for EMCP methods
    for (int i = 0; i < old_methods->length(); i++) {
      Method* old_method = old_methods->at(i);
      if (!old_method->is_obsolete() && old_method->on_stack()) {
        // if EMCP method (not obsolete) is on the stack, mark as EMCP so that
        // we can add breakpoints for it.

        // We set the method->on_stack bit during safepoints for class redefinition
        // and use this bit to set the is_running_emcp bit.
        // After the safepoint, the on_stack bit is cleared and the running emcp
        // method may exit. If so, we would set a breakpoint in a method that
        // is never reached, but this won't be noticeable to the programmer.
        old_method->set_running_emcp(true);
        RC_TRACE(0x00000400, ("add: EMCP method %s is on_stack " INTPTR_FORMAT,
          old_method->name_and_sig_as_C_string(), p2i(old_method)));
      } else if (!old_method->is_obsolete()) {
        RC_TRACE(0x00000400, ("add: EMCP method %s is NOT on_stack " INTPTR_FORMAT,
          old_method->name_and_sig_as_C_string(), p2i(old_method)));
      }
    }
  }

  // Add previous version if any methods are still running.
  RC_TRACE(0x00000400, ("add: scratch class added; one of its methods is on_stack"));
  assert(scratch_class->previous_versions() == NULL, "shouldn't have a previous version");
  scratch_class->link_previous_versions(previous_versions());
  link_previous_versions(scratch_class());
  // Update count for class unloading.
  _previous_version_count++;
} // end add_previous_version()


// Find the method whose method_idnum() is idnum. Tries a direct index probe
// first (idnum usually equals the method's slot), then falls back to a
// linear scan. Returns NULL if no method has that idnum.
Method* InstanceKlass::method_with_idnum(int idnum) {
  Method* m = NULL;
  if (idnum < methods()->length()) {
    m = methods()->at(idnum);
  }
  if (m == NULL || m->method_idnum() != idnum) {
    for (int index = 0; index < methods()->length(); ++index) {
      m = methods()->at(index);
      if (m->method_idnum() == idnum) {
        return m;
      }
    }
    // None found, return null for the caller to handle.
3616 return NULL; 3617 } 3618 return m; 3619 } 3620 3621 3622 Method* InstanceKlass::method_with_orig_idnum(int idnum) { 3623 if (idnum >= methods()->length()) { 3624 return NULL; 3625 } 3626 Method* m = methods()->at(idnum); 3627 if (m != NULL && m->orig_method_idnum() == idnum) { 3628 return m; 3629 } 3630 // Obsolete method idnum does not match the original idnum 3631 for (int index = 0; index < methods()->length(); ++index) { 3632 m = methods()->at(index); 3633 if (m->orig_method_idnum() == idnum) { 3634 return m; 3635 } 3636 } 3637 // None found, return null for the caller to handle. 3638 return NULL; 3639 } 3640 3641 3642 Method* InstanceKlass::method_with_orig_idnum(int idnum, int version) { 3643 InstanceKlass* holder = get_klass_version(version); 3644 if (holder == NULL) { 3645 return NULL; // The version of klass is gone, no method is found 3646 } 3647 Method* method = holder->method_with_orig_idnum(idnum); 3648 return method; 3649 } 3650 3651 3652 jint InstanceKlass::get_cached_class_file_len() { 3653 return VM_RedefineClasses::get_cached_class_file_len(_cached_class_file); 3654 } 3655 3656 unsigned char * InstanceKlass::get_cached_class_file_bytes() { 3657 return VM_RedefineClasses::get_cached_class_file_bytes(_cached_class_file); 3658 } 3659 3660 #define THROW_DVT_ERROR(s) \ 3661 Exceptions::fthrow(THREAD_AND_LOCATION, vmSymbols::java_lang_InternalError(), \ 3662 "DeriveValueType class '%s' %s", external_name(),(s)); \ 3663 return 3664 3665 void InstanceKlass::create_derive_value_type(Handle class_loader, Handle protection_domain, TRAPS) { 3666 ResourceMark rm(THREAD); 3667 HandleMark hm(THREAD); 3668 3669 if (!ValhallaMVT1_0) { 3670 return; // Silent fail 3671 } 3672 // Validate VCC... 
3673 if (!has_nonstatic_fields()) { 3674 THROW_DVT_ERROR("has no instance fields"); 3675 } 3676 if (is_value()) { 3677 THROW_DVT_ERROR("is already a value type"); 3678 } 3679 if (!access_flags().is_final()) { 3680 THROW_DVT_ERROR("is not a final class"); 3681 } 3682 if (super() != SystemDictionary::Object_klass()) { 3683 THROW_DVT_ERROR("does not derive from Object only"); 3684 } 3685 3686 // All non-static are final 3687 GrowableArray<Handle>* fields = new GrowableArray<Handle>(THREAD, java_fields_count()*2); 3688 GrowableArray<jint>* fields_access = new GrowableArray<jint>(THREAD, java_fields_count()*2); 3689 for (JavaFieldStream fs(this); !fs.done(); fs.next()) { 3690 AccessFlags access_flags = fs.access_flags(); 3691 if (access_flags.is_static()) { 3692 continue; 3693 } 3694 if (!access_flags.is_final()) { 3695 THROW_DVT_ERROR("contains non-final instance field"); 3696 } 3697 jint flags = access_flags.get_flags(); 3698 // Remember the field name, signature, access modifiers 3699 Handle h = java_lang_String::create_from_symbol(fs.name(), CHECK); 3700 fields->append(h); 3701 h = java_lang_String::create_from_symbol(fs.signature(), CHECK); 3702 fields->append(h); 3703 fields_access->append(access_flags.get_flags()); 3704 } 3705 3706 // Generate DVT... 3707 log_debug(classload)("Cooking DVT for VCC %s", external_name()); 3708 const char* this_name = name()->as_C_string(); 3709 3710 // Assemble the Java args...field descriptor array 3711 objArrayHandle fdarr = oopFactory::new_objectArray(fields->length(), CHECK); 3712 for (int i = 0; i < fields->length(); i++) { 3713 fdarr->obj_at_put(i, fields->at(i)()); 3714 } 3715 //...field access modifiers array 3716 typeArrayHandle faarr = oopFactory::new_intArray(fields_access->length(), CHECK); 3717 for (int i = 0; i < fields_access->length(); i++) { 3718 faarr->int_at_put(i, fields_access->at(i)); 3719 } 3720 3721 Handle vcc_name_h = java_lang_String::create_from_symbol(name(), CHECK); 3722 // Upcall to our Java helper... 
3723 JavaValue result(T_OBJECT); 3724 JavaCallArguments args(5); 3725 args.push_oop(vcc_name_h); 3726 args.push_oop(class_loader); 3727 args.push_oop(protection_domain); 3728 args.push_oop(fdarr); 3729 args.push_oop(faarr); 3730 JavaCalls::call_static(&result, 3731 SystemDictionary::Valhalla_MVT1_0_klass(), 3732 vmSymbols::valhalla_shady_MVT1_0_createDerivedValueType(), 3733 vmSymbols::valhalla_shady_MVT1_0_createDerivedValueType_signature(), 3734 &args, 3735 CHECK); 3736 Handle returned((oop) result.get_jobject()); 3737 if (returned.is_null()) { 3738 THROW_DVT_ERROR("unknown error deriving value type"); 3739 } 3740 TempNewSymbol dvt_name_sym = java_lang_String::as_symbol(returned, CHECK); 3741 3742 Klass* dvt_klass = SystemDictionary::resolve_or_null(dvt_name_sym, 3743 class_loader, 3744 protection_domain, 3745 CHECK); 3746 if (!dvt_klass->is_value()) { 3747 THROW_DVT_ERROR("failed to resolve derived value type"); 3748 } 3749 /** 3750 * Found it, let's point to each other to denote "is_derive_vt()"... 3751 */ 3752 ValueKlass* vt_klass = ValueKlass::cast(dvt_klass); 3753 assert(vt_klass->class_loader() == class_loader(), "DVT Not the same class loader as VCC"); 3754 _derive_value_type_klass = vt_klass; 3755 vt_klass->_derive_value_type_klass = this; 3756 log_debug(classload)("Cooked DVT %s for VCC %s", vt_klass->external_name(), external_name()); 3757 } 3758