1 /* 2 * Copyright (c) 1997, 2018, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 
22 * 23 */ 24 25 #include "precompiled.hpp" 26 #include "jvm.h" 27 #include "aot/aotLoader.hpp" 28 #include "classfile/classFileParser.hpp" 29 #include "classfile/classFileStream.hpp" 30 #include "classfile/classLoader.hpp" 31 #include "classfile/classLoaderData.inline.hpp" 32 #include "classfile/javaClasses.hpp" 33 #include "classfile/moduleEntry.hpp" 34 #include "classfile/systemDictionary.hpp" 35 #include "classfile/systemDictionaryShared.hpp" 36 #include "classfile/verifier.hpp" 37 #include "classfile/vmSymbols.hpp" 38 #include "code/dependencyContext.hpp" 39 #include "compiler/compileBroker.hpp" 40 #include "gc/shared/collectedHeap.inline.hpp" 41 #include "interpreter/oopMapCache.hpp" 42 #include "interpreter/rewriter.hpp" 43 #include "jvmtifiles/jvmti.h" 44 #include "logging/log.hpp" 45 #include "logging/logMessage.hpp" 46 #include "logging/logStream.hpp" 47 #include "memory/allocation.inline.hpp" 48 #include "memory/heapInspection.hpp" 49 #include "memory/iterator.inline.hpp" 50 #include "memory/metadataFactory.hpp" 51 #include "memory/metaspaceClosure.hpp" 52 #include "memory/metaspaceShared.hpp" 53 #include "memory/oopFactory.hpp" 54 #include "memory/resourceArea.hpp" 55 #include "oops/fieldStreams.hpp" 56 #include "oops/instanceClassLoaderKlass.hpp" 57 #include "oops/instanceKlass.inline.hpp" 58 #include "oops/instanceMirrorKlass.hpp" 59 #include "oops/instanceOop.hpp" 60 #include "oops/klass.inline.hpp" 61 #include "oops/method.hpp" 62 #include "oops/oop.inline.hpp" 63 #include "oops/symbol.hpp" 64 #include "oops/valueKlass.hpp" 65 #include "prims/jvmtiExport.hpp" 66 #include "prims/jvmtiRedefineClasses.hpp" 67 #include "prims/jvmtiThreadState.hpp" 68 #include "prims/methodComparator.hpp" 69 #include "runtime/atomic.hpp" 70 #include "runtime/fieldDescriptor.hpp" 71 #include "runtime/handles.inline.hpp" 72 #include "runtime/javaCalls.hpp" 73 #include "runtime/mutexLocker.hpp" 74 #include "runtime/orderAccess.hpp" 75 #include "runtime/thread.inline.hpp" 76 
#include "services/classLoadingService.hpp"
#include "services/threadService.hpp"
#include "utilities/dtrace.hpp"
#include "utilities/macros.hpp"
#include "utilities/stringUtils.hpp"
#ifdef COMPILER1
#include "c1/c1_Compiler.hpp"
#endif

#ifdef DTRACE_ENABLED

// The DTRACE_CLASSINIT_PROBE macros paste a lower-case probe-name suffix;
// map those suffixes onto the upper-case probe macros generated by dtrace.
#define HOTSPOT_CLASS_INITIALIZATION_required HOTSPOT_CLASS_INITIALIZATION_REQUIRED
#define HOTSPOT_CLASS_INITIALIZATION_recursive HOTSPOT_CLASS_INITIALIZATION_RECURSIVE
#define HOTSPOT_CLASS_INITIALIZATION_concurrent HOTSPOT_CLASS_INITIALIZATION_CONCURRENT
#define HOTSPOT_CLASS_INITIALIZATION_erroneous HOTSPOT_CLASS_INITIALIZATION_ERRONEOUS
#define HOTSPOT_CLASS_INITIALIZATION_super__failed HOTSPOT_CLASS_INITIALIZATION_SUPER_FAILED
#define HOTSPOT_CLASS_INITIALIZATION_clinit HOTSPOT_CLASS_INITIALIZATION_CLINIT
#define HOTSPOT_CLASS_INITIALIZATION_error HOTSPOT_CLASS_INITIALIZATION_ERROR
#define HOTSPOT_CLASS_INITIALIZATION_end HOTSPOT_CLASS_INITIALIZATION_END

// Fire a class-initialization probe with this klass's UTF-8 name, its
// class loader, and the kind of thread performing the step.
#define DTRACE_CLASSINIT_PROBE(type, thread_type)                \
  {                                                              \
    char* data = NULL;                                           \
    int len = 0;                                                 \
    Symbol* clss_name = name();                                  \
    if (clss_name != NULL) {                                     \
      data = (char*)clss_name->bytes();                          \
      len = clss_name->utf8_length();                            \
    }                                                            \
    HOTSPOT_CLASS_INITIALIZATION_##type(                         \
      data, len, (void*)class_loader(), thread_type);            \
  }

// Same as above, but also reports whether the thread had to wait.
#define DTRACE_CLASSINIT_PROBE_WAIT(type, thread_type, wait)     \
  {                                                              \
    char* data = NULL;                                           \
    int len = 0;                                                 \
    Symbol* clss_name = name();                                  \
    if (clss_name != NULL) {                                     \
      data = (char*)clss_name->bytes();                          \
      len = clss_name->utf8_length();                            \
    }                                                            \
    HOTSPOT_CLASS_INITIALIZATION_##type(                         \
      data, len, (void*)class_loader(), thread_type, wait);      \
  }

#else //  ndef DTRACE_ENABLED

#define DTRACE_CLASSINIT_PROBE(type, thread_type)
#define DTRACE_CLASSINIT_PROBE_WAIT(type, thread_type, wait)

#endif //  ndef DTRACE_ENABLED

// Returns true if the class being parsed is java.lang.ClassLoader itself,
// or (once ClassLoader is loaded) a subtype of it. Used to decide whether
// to allocate an InstanceClassLoaderKlass.
static inline bool is_class_loader(const Symbol* class_name,
                                   const ClassFileParser& parser) {
  assert(class_name != NULL, "invariant");

  if (class_name == vmSymbols::java_lang_ClassLoader()) {
    return true;
  }

  if (SystemDictionary::ClassLoader_klass_loaded()) {
    const Klass* const super_klass = parser.super_klass();
    if (super_klass != NULL) {
      if (super_klass->is_subtype_of(SystemDictionary::ClassLoader_klass())) {
        return true;
      }
    }
  }
  return false;
}

// called to verify that k is a member of this nest
bool InstanceKlass::has_nest_member(InstanceKlass* k, TRAPS) const {
  // An empty NestMembers attribute means this class hosts no nest.
  if (_nest_members == NULL || _nest_members == Universe::the_empty_short_array()) {
    if (log_is_enabled(Trace, class, nestmates)) {
      ResourceMark rm(THREAD);
      log_trace(class, nestmates)("Checked nest membership of %s in non-nest-host class %s",
                                  k->external_name(), this->external_name());
    }
    return false;
  }

  if (log_is_enabled(Trace, class, nestmates)) {
    ResourceMark rm(THREAD);
    log_trace(class, nestmates)("Checking nest membership of %s in %s",
                                k->external_name(), this->external_name());
  }

  // Check names first and if they match then check actual klass. This avoids
  // resolving anything unnecessarily.
  for (int i = 0; i < _nest_members->length(); i++) {
    int cp_index = _nest_members->at(i);
    Symbol* name = _constants->klass_name_at(cp_index);
    if (name == k->name()) {
      log_trace(class, nestmates)("- Found it at nest_members[%d] => cp[%d]", i, cp_index);

      // names match so check actual klass - this may trigger class loading if
      // it doesn't match (but that should be impossible)
      Klass* k2 = _constants->klass_at(cp_index, CHECK_false);
      if (k2 == k) {
        log_trace(class, nestmates)("- class is listed as a nest member");
        return true;
      } else {
        // same name but different klass!
        log_trace(class, nestmates)(" - klass comparison failed!");
        // can't have different classes for the same name, so we're done
        return false;
      }
    }
  }
  log_trace(class, nestmates)("- class is NOT a nest member!");
  return false;
}

// Return nest-host class, resolving, validating and saving it if needed.
// In cases where this is called from a thread that can not do classloading
// (such as a native JIT thread) then we simply return NULL, which in turn
// causes the access check to return false. Such code will retry the access
// from a more suitable environment later.
InstanceKlass* InstanceKlass::nest_host(Symbol* validationException, TRAPS) {
  InstanceKlass* nest_host_k = _nest_host;
  if (nest_host_k == NULL) {
    // need to resolve and save our nest-host class. This could be attempted
    // concurrently but as the result is idempotent and we don't use the class
    // then we do not need any synchronization beyond what is implicitly used
    // during class loading.
    if (_nest_host_index != 0) { // we have a real nest_host
      // Before trying to resolve check if we're in a suitable context
      if (!THREAD->can_call_java() && !_constants->tag_at(_nest_host_index).is_klass()) {
        if (log_is_enabled(Trace, class, nestmates)) {
          ResourceMark rm(THREAD);
          log_trace(class, nestmates)("Rejected resolution of nest-host of %s in unsuitable thread",
                                      this->external_name());
        }
        return NULL;
      }

      if (log_is_enabled(Trace, class, nestmates)) {
        ResourceMark rm(THREAD);
        log_trace(class, nestmates)("Resolving nest-host of %s using cp entry for %s",
                                    this->external_name(),
                                    _constants->klass_name_at(_nest_host_index)->as_C_string());
      }

      Klass* k = _constants->klass_at(_nest_host_index, THREAD);
      if (HAS_PENDING_EXCEPTION) {
        Handle exc_h = Handle(THREAD, PENDING_EXCEPTION);
        if (exc_h->is_a(SystemDictionary::NoClassDefFoundError_klass())) {
          // throw a new CDNFE with the original as its cause, and a clear msg
          ResourceMark rm(THREAD);
          char buf[200];
          CLEAR_PENDING_EXCEPTION;
          jio_snprintf(buf, sizeof(buf),
                       "Unable to load nest-host class (%s) of %s",
                       _constants->klass_name_at(_nest_host_index)->as_C_string(),
                       this->external_name());
          log_trace(class, nestmates)("%s - NoClassDefFoundError", buf);
          THROW_MSG_CAUSE_NULL(vmSymbols::java_lang_NoClassDefFoundError(), buf, exc_h);
        }
        // All other exceptions pass through (OOME, StackOverflowError, LinkageErrors etc).
        return NULL;
      }

      // A valid nest-host is an instance class in the current package that lists this
      // class as a nest member. If any of these conditions are not met we post the
      // requested exception type (if any) and return NULL

      const char* error = NULL;

      // JVMS 5.4.4 indicates package check comes first
      if (is_same_class_package(k)) {

        // Now check actual membership. We can't be a member if our "host" is
        // not an instance class.
        if (k->is_instance_klass()) {
          nest_host_k = InstanceKlass::cast(k);

          bool is_member = nest_host_k->has_nest_member(this, CHECK_NULL);
          if (is_member) {
            // save resolved nest-host value
            _nest_host = nest_host_k;

            if (log_is_enabled(Trace, class, nestmates)) {
              ResourceMark rm(THREAD);
              log_trace(class, nestmates)("Resolved nest-host of %s to %s",
                                          this->external_name(), k->external_name());
            }
            return nest_host_k;
          }
        }
        error = "current type is not listed as a nest member";
      } else {
        error = "types are in different packages";
      }

      if (log_is_enabled(Trace, class, nestmates)) {
        ResourceMark rm(THREAD);
        log_trace(class, nestmates)("Type %s is not a nest member of resolved type %s: %s",
                                    this->external_name(),
                                    k->external_name(),
                                    error);
      }

      if (validationException != NULL) {
        ResourceMark rm(THREAD);
        Exceptions::fthrow(THREAD_AND_LOCATION,
                           validationException,
                           "Type %s is not a nest member of %s: %s",
                           this->external_name(),
                           k->external_name(),
                           error
                           );
      }
      return NULL;
    } else {
      // No NestHost attribute: this class is its own (trivial) nest-host.
      if (log_is_enabled(Trace, class, nestmates)) {
        ResourceMark rm(THREAD);
        log_trace(class, nestmates)("Type %s is not part of a nest: setting nest-host to self",
                                    this->external_name());
      }
      // save resolved nest-host value
      return (_nest_host = this);
    }
  }
  return nest_host_k;
}

// check if 'this' and k are nestmates (same nest_host), or k is our nest_host,
// or we are k's nest_host - all of which is covered by comparing the two
// resolved_nest_hosts
bool InstanceKlass::has_nestmate_access_to(InstanceKlass* k, TRAPS) {

  assert(this != k, "this should be handled by higher-level code");

  // Per JVMS 5.4.4 we first resolve and validate the current class, then
  // the target class k. Resolution exceptions will be passed on by upper
  // layers. IncompatibleClassChangeErrors from membership validation failures
  // will also be passed through.

  Symbol* icce = vmSymbols::java_lang_IncompatibleClassChangeError();
  InstanceKlass* cur_host = nest_host(icce, CHECK_false);
  if (cur_host == NULL) {
    return false;
  }

  Klass* k_nest_host = k->nest_host(icce, CHECK_false);
  if (k_nest_host == NULL) {
    return false;
  }

  bool access = (cur_host == k_nest_host);

  if (log_is_enabled(Trace, class, nestmates)) {
    ResourceMark rm(THREAD);
    log_trace(class, nestmates)("Class %s does %shave nestmate access to %s",
                                this->external_name(),
                                access ? "" : "NOT ",
                                k->external_name());
  }

  return access;
}

// Allocate the appropriate InstanceKlass subtype (mirror, class loader,
// value, reference, or plain) sized from what the parser has computed.
// Returns NULL (with a pending exception, e.g. OOM) on allocation failure.
InstanceKlass* InstanceKlass::allocate_instance_klass(const ClassFileParser& parser, TRAPS) {
  const int size = InstanceKlass::size(parser.vtable_size(),
                                       parser.itable_size(),
                                       nonstatic_oop_map_size(parser.total_oop_map_count()),
                                       parser.is_interface(),
                                       parser.is_anonymous(),
                                       should_store_fingerprint(parser.is_anonymous()),
                                       parser.has_flattenable_fields() ? parser.java_fields_count() : 0,
                                       parser.is_value_type());

  const Symbol* const class_name = parser.class_name();
  assert(class_name != NULL, "invariant");
  ClassLoaderData* loader_data = parser.loader_data();
  assert(loader_data != NULL, "invariant");

  InstanceKlass* ik;

  // Allocation
  if (REF_NONE == parser.reference_type()) {
    if (class_name == vmSymbols::java_lang_Class()) {
      // mirror
      ik = new (loader_data, size, THREAD) InstanceMirrorKlass(parser);
    } else if (is_class_loader(class_name, parser)) {
      // class loader
      ik = new (loader_data, size, THREAD) InstanceClassLoaderKlass(parser);
    } else if (parser.is_value_type()) {
      // value type
      ik = new (loader_data, size, THREAD) ValueKlass(parser);
    } else {
      // normal
      ik = new (loader_data, size, THREAD) InstanceKlass(parser, InstanceKlass::_misc_kind_other);
    }
  } else {
    // reference
    ik = new (loader_data, size, THREAD) InstanceRefKlass(parser);
  }

  // Check for pending exception before adding to the loader data and incrementing
  // class count. Can get OOM here.
  if (HAS_PENDING_EXCEPTION) {
    return NULL;
  }

#ifdef ASSERT
  // Sanity-check that the embedded tables all land within the allocation.
  assert(ik->size() == size, "");
  ik->bounds_check((address) ik->start_of_vtable(), false, size);
  ik->bounds_check((address) ik->start_of_itable(), false, size);
  ik->bounds_check((address) ik->end_of_itable(), true, size);
  ik->bounds_check((address) ik->end_of_nonstatic_oop_maps(), true, size);
#endif //ASSERT
  return ik;
}

#ifndef PRODUCT
// Debug-only check that addr lies within [this, this + size); edge_ok
// allows addr to sit exactly on either boundary. size_in_bytes < 0 means
// "use this klass's own size()". Dumps the object on failure.
bool InstanceKlass::bounds_check(address addr, bool edge_ok, intptr_t size_in_bytes) const {
  const char* bad = NULL;
  address end = NULL;
  if (addr < (address)this) {
    bad = "before";
  } else if (addr == (address)this) {
    if (edge_ok)  return true;
    bad = "just before";
  } else if (addr == (end = (address)this + sizeof(intptr_t) * (size_in_bytes < 0 ? size() : size_in_bytes))) {
    if (edge_ok)  return true;
    bad = "just after";
  } else if (addr > end) {
    bad = "after";
  } else {
    return true;
  }
  tty->print_cr("%s object bounds: " INTPTR_FORMAT " [" INTPTR_FORMAT ".." INTPTR_FORMAT "]",
                bad, (intptr_t)addr, (intptr_t)this, (intptr_t)end);
  Verbose = WizardMode = true;  this->print(); //@@
  return false;
}
#endif //PRODUCT

// copy method ordering from resource area to Metaspace
void InstanceKlass::copy_method_ordering(const intArray* m, TRAPS) {
  if (m != NULL) {
    // allocate a new array and copy contents (memcpy?)
    _method_ordering = MetadataFactory::new_array<int>(class_loader_data(), m->length(), CHECK);
    for (int i = 0; i < m->length(); i++) {
      _method_ordering->at_put(i, m->at(i));
    }
  } else {
    _method_ordering = Universe::the_empty_int_array();
  }
}

// create a new array of vtable_indices for default methods
Array<int>* InstanceKlass::create_new_default_vtable_indices(int len, TRAPS) {
  Array<int>* vtable_indices = MetadataFactory::new_array<int>(class_loader_data(), len, CHECK_NULL);
  assert(default_vtable_indices() == NULL, "only create once");
  set_default_vtable_indices(vtable_indices);
  return vtable_indices;
}

// Construct an InstanceKlass from parsed class-file data. The underlying
// metaspace memory is expected to be pre-zeroed (see asserts below).
InstanceKlass::InstanceKlass(const ClassFileParser& parser, unsigned kind, KlassID id) :
  Klass(id),
  _nest_members(NULL),
  _nest_host_index(0),
  _nest_host(NULL),
  _static_field_size(parser.static_field_size()),
  _nonstatic_oop_map_size(nonstatic_oop_map_size(parser.total_oop_map_count())),
  _itable_len(parser.itable_size()),
  _extra_flags(0),
  _reference_type(parser.reference_type()),
  _adr_valueklass_fixed_block(NULL) {
  set_vtable_length(parser.vtable_size());
  set_kind(kind);
  set_access_flags(parser.access_flags());
  set_is_anonymous(parser.is_anonymous());
  set_layout_helper(Klass::instance_layout_helper(parser.layout_size(),
                                                    false));
  if (parser.has_flattenable_fields()) {
    set_has_value_fields();
  }
  _java_fields_count = parser.java_fields_count();

  assert(NULL == _methods, "underlying memory not zeroed?");
  assert(is_instance_klass(), "is layout incorrect?");
  assert(size_helper() == parser.layout_size(), "incorrect size_helper?");
}

// Free each Method* and then the methods array itself, unless the array is
// the shared empty sentinel or lives in the CDS archive.
void InstanceKlass::deallocate_methods(ClassLoaderData* loader_data,
                                       Array<Method*>* methods) {
  if (methods != NULL && methods != Universe::the_empty_method_array() &&
      !methods->is_shared()) {
    for (int i = 0; i < methods->length(); i++) {
      Method* method = methods->at(i);
      if (method == NULL) continue;  // maybe null if error processing
      // Only want to delete methods that are not executing for RedefineClasses.
      // The previous version will point to them so they're not totally dangling
      assert (!method->on_stack(), "shouldn't be called with methods on stack");
      MetadataFactory::free_metadata(loader_data, method);
    }
    MetadataFactory::free_array<Method*>(loader_data, methods);
  }
}

void InstanceKlass::deallocate_interfaces(ClassLoaderData* loader_data,
                                          const Klass* super_klass,
                                          Array<InstanceKlass*>* local_interfaces,
                                          Array<InstanceKlass*>* transitive_interfaces) {
  // Only deallocate transitive interfaces if not empty, same as super class
  // or same as local interfaces.  See code in parseClassFile.
  Array<InstanceKlass*>* ti = transitive_interfaces;
  if (ti != Universe::the_empty_instance_klass_array() && ti != local_interfaces) {
    // check that the interfaces don't come from super class
    Array<InstanceKlass*>* sti = (super_klass == NULL) ? NULL :
                    InstanceKlass::cast(super_klass)->transitive_interfaces();
    if (ti != sti && ti != NULL && !ti->is_shared()) {
      MetadataFactory::free_array<InstanceKlass*>(loader_data, ti);
    }
  }

  // local interfaces can be empty
  if (local_interfaces != Universe::the_empty_instance_klass_array() &&
      local_interfaces != NULL && !local_interfaces->is_shared()) {
    MetadataFactory::free_array<InstanceKlass*>(loader_data, local_interfaces);
  }
}

// This function deallocates the metadata and C heap pointers that the
// InstanceKlass points to.
void InstanceKlass::deallocate_contents(ClassLoaderData* loader_data) {

  // Orphan the mirror first, CMS thinks it's still live.
  if (java_mirror() != NULL) {
    java_lang_Class::set_klass(java_mirror(), NULL);
  }

  // Also remove mirror from handles
  loader_data->remove_handle(_java_mirror);

  // Need to take this class off the class loader data list.
  loader_data->remove_class(this);

  // The array_klass for this class is created later, after error handling.
  // For class redefinition, we keep the original class so this scratch class
  // doesn't have an array class.  Either way, assert that there is nothing
  // to deallocate.
  assert(array_klasses() == NULL, "array classes shouldn't be created for this class yet");

  // Release C heap allocated data that this might point to, which includes
  // reference counting symbol names.
  release_C_heap_structures();

  deallocate_methods(loader_data, methods());
  set_methods(NULL);

  // Throughout this function arrays are only freed when they are not the
  // shared "empty" sentinel and are not mapped from the CDS archive.
  if (method_ordering() != NULL &&
      method_ordering() != Universe::the_empty_int_array() &&
      !method_ordering()->is_shared()) {
    MetadataFactory::free_array<int>(loader_data, method_ordering());
  }
  set_method_ordering(NULL);

  // default methods can be empty
  if (default_methods() != NULL &&
      default_methods() != Universe::the_empty_method_array() &&
      !default_methods()->is_shared()) {
    MetadataFactory::free_array<Method*>(loader_data, default_methods());
  }
  // Do NOT deallocate the default methods, they are owned by superinterfaces.
  set_default_methods(NULL);

  // default methods vtable indices can be empty
  if (default_vtable_indices() != NULL &&
      !default_vtable_indices()->is_shared()) {
    MetadataFactory::free_array<int>(loader_data, default_vtable_indices());
  }
  set_default_vtable_indices(NULL);


  // This array is in Klass, but remove it with the InstanceKlass since
  // this place would be the only caller and it can share memory with transitive
  // interfaces.
  if (secondary_supers() != NULL &&
      secondary_supers() != Universe::the_empty_klass_array() &&
      // see comments in compute_secondary_supers about the following cast
      (address)(secondary_supers()) != (address)(transitive_interfaces()) &&
      !secondary_supers()->is_shared()) {
    MetadataFactory::free_array<Klass*>(loader_data, secondary_supers());
  }
  set_secondary_supers(NULL);

  deallocate_interfaces(loader_data, super(), local_interfaces(), transitive_interfaces());
  set_transitive_interfaces(NULL);
  set_local_interfaces(NULL);

  if (fields() != NULL && !fields()->is_shared()) {
    MetadataFactory::free_array<jushort>(loader_data, fields());
  }
  set_fields(NULL, 0);

  // If a method from a redefined class is using this constant pool, don't
  // delete it, yet.  The new class's previous version will point to this.
  if (constants() != NULL) {
    assert (!constants()->on_stack(), "shouldn't be called if anything is onstack");
    if (!constants()->is_shared()) {
      MetadataFactory::free_metadata(loader_data, constants());
    }
    // Delete any cached resolution errors for the constant pool
    SystemDictionary::delete_resolution_error(constants());

    set_constants(NULL);
  }

  if (inner_classes() != NULL &&
      inner_classes() != Universe::the_empty_short_array() &&
      !inner_classes()->is_shared()) {
    MetadataFactory::free_array<jushort>(loader_data, inner_classes());
  }
  set_inner_classes(NULL);

  if (nest_members() != NULL &&
      nest_members() != Universe::the_empty_short_array() &&
      !nest_members()->is_shared()) {
    MetadataFactory::free_array<jushort>(loader_data, nest_members());
  }
  set_nest_members(NULL);

  if (value_types() != NULL && !value_types()->is_shared()) {
    MetadataFactory::free_array<ValueTypes>(loader_data, value_types());
  }
  set_value_types(NULL);

  // We should deallocate the Annotations instance if it's not in shared spaces.
  if (annotations() != NULL && !annotations()->is_shared()) {
    MetadataFactory::free_metadata(loader_data, annotations());
  }
  set_annotations(NULL);
}

bool InstanceKlass::should_be_initialized() const {
  return !is_initialized();
}

klassItable InstanceKlass::itable() const {
  return klassItable(const_cast<InstanceKlass*>(this));
}

// Optionally (under -XX:+EagerInitialization) initialize a class eagerly at
// load time when doing so can have no observable side effects.
void InstanceKlass::eager_initialize(Thread *thread) {
  if (!EagerInitialization) return;

  if (this->is_not_initialized()) {
    // abort if the class has a class initializer
    if (this->class_initializer() != NULL) return;

    // abort if it is java.lang.Object (initialization is handled in genesis)
    Klass* super_klass = super();
    if (super_klass == NULL) return;

    // abort if the super class should be initialized
    if (!InstanceKlass::cast(super_klass)->is_initialized()) return;

    // call body to expose the this pointer
    eager_initialize_impl();
  }
}

// JVMTI spec thinks there are signers and protection domain in the
// instanceKlass.  These accessors pretend these fields are there.
// The hprof specification also thinks these fields are in InstanceKlass.
oop InstanceKlass::protection_domain() const {
  // return the protection_domain from the mirror
  return java_lang_Class::protection_domain(java_mirror());
}

// To remove these from requires an incompatible change and CCC request.
objArrayOop InstanceKlass::signers() const {
  // return the signers from the mirror
  return java_lang_Class::signers(java_mirror());
}

oop InstanceKlass::init_lock() const {
  // return the init lock from the mirror
  oop lock = java_lang_Class::init_lock(java_mirror());
  // Prevent reordering with any access of initialization state
  OrderAccess::loadload();
  assert((oop)lock != NULL || !is_not_initialized(), // initialized or in_error state
         "only fully initialized state can have a null lock");
  return lock;
}

// Set the initialization lock to null so the object can be GC'ed.  Any racing
// threads to get this lock will see a null lock and will not lock.
// That's okay because they all check for initialized state after getting
// the lock and return.
void InstanceKlass::fence_and_clear_init_lock() {
  // make sure previous stores are all done, notably the init_state.
  OrderAccess::storestore();
  java_lang_Class::set_init_lock(java_mirror(), NULL);
  assert(!is_not_initialized(), "class must be initialized now");
}

// Body of eager initialization: link the class under the init lock and, if
// linking succeeds, mark it fully_initialized without running any <clinit>
// (eager_initialize() has already verified there is none).
void InstanceKlass::eager_initialize_impl() {
  EXCEPTION_MARK;
  HandleMark hm(THREAD);
  Handle h_init_lock(THREAD, init_lock());
  ObjectLocker ol(h_init_lock, THREAD, h_init_lock() != NULL);

  // abort if someone beat us to the initialization
  if (!is_not_initialized()) return;  // note: not equivalent to is_initialized()

  ClassState old_state = init_state();
  link_class_impl(true, THREAD);
  if (HAS_PENDING_EXCEPTION) {
    CLEAR_PENDING_EXCEPTION;
    // Abort if linking the class throws an exception.

    // Use a test to avoid redundantly resetting the state if there's
    // no change.  Set_init_state() asserts that state changes make
    // progress, whereas here we might just be spinning in place.
    if (old_state != _init_state)
      set_init_state(old_state);
  } else {
    // linking successful, mark class as initialized
    set_init_state(fully_initialized);
    fence_and_clear_init_lock();
    // trace
    if (log_is_enabled(Info, class, init)) {
      ResourceMark rm(THREAD);
      log_info(class, init)("[Initialized %s without side effects]", external_name());
    }
  }
}


// See "The Virtual Machine Specification" section 2.16.5 for a detailed explanation of the class initialization
// process. The step comments refers to the procedure described in that section.
// Note: implementation moved to static method to expose the this pointer.
void InstanceKlass::initialize(TRAPS) {
  if (this->should_be_initialized()) {
    initialize_impl(CHECK);
    // Note: at this point the class may be initialized
    //       OR it may be in the state of being initialized
    //       in case of recursive initialization!
  } else {
    assert(is_initialized(), "sanity check");
  }
}


bool InstanceKlass::verify_code(bool throw_verifyerror, TRAPS) {
  // 1) Verify the bytecodes
  Verifier::Mode mode =
    throw_verifyerror ? Verifier::ThrowException : Verifier::NoException;
  return Verifier::verify(this, mode, should_verify_class(), THREAD);
}


// Used exclusively by the shared spaces dump mechanism to prevent
// classes mapped into the shared regions in new VMs from appearing linked.

void InstanceKlass::unlink_class() {
  assert(is_linked(), "must be linked");
  _init_state = loaded;
}

void InstanceKlass::link_class(TRAPS) {
  assert(is_loaded(), "must be loaded");
  if (!is_linked()) {
    link_class_impl(true, CHECK);
  }
}

// Called to verify that a class can link during initialization, without
// throwing a VerifyError.
bool InstanceKlass::link_class_or_fail(TRAPS) {
  assert(is_loaded(), "must be loaded");
  if (!is_linked()) {
    link_class_impl(false, CHECK_false);
  }
  return is_linked();
}

// Link this class: recursively link supertypes, then verify, rewrite and
// build the v/i-tables under the init lock.  Returns true once the class is
// linked; returns false (possibly with a pending exception, depending on
// throw_verifyerror) when linking fails.
bool InstanceKlass::link_class_impl(bool throw_verifyerror, TRAPS) {
  if (DumpSharedSpaces && is_in_error_state()) {
    // This is for CDS dumping phase only -- we use the in_error_state to indicate that
    // the class has failed verification. Throwing the NoClassDefFoundError here is just
    // a convenient way to stop repeat attempts to verify the same (bad) class.
    //
    // Note that the NoClassDefFoundError is not part of the JLS, and should not be
    // thrown if we are executing Java code. This is not a problem for CDS dumping phase since
    // it doesn't execute any Java code.
    ResourceMark rm(THREAD);
    Exceptions::fthrow(THREAD_AND_LOCATION,
                       vmSymbols::java_lang_NoClassDefFoundError(),
                       "Class %s, or one of its supertypes, failed class initialization",
                       external_name());
    return false;
  }
  // return if already verified
  if (is_linked()) {
    return true;
  }

  // Timing
  // timer handles recursion
  assert(THREAD->is_Java_thread(), "non-JavaThread in link_class_impl");
  JavaThread* jt = (JavaThread*)THREAD;

  // link super class before linking this class
  Klass* super_klass = super();
  if (super_klass != NULL) {
    if (super_klass->is_interface()) {  // check if super class is an interface
      ResourceMark rm(THREAD);
      Exceptions::fthrow(
        THREAD_AND_LOCATION,
        vmSymbols::java_lang_IncompatibleClassChangeError(),
        "class %s has interface %s as super class",
        external_name(),
        super_klass->external_name()
      );
      return false;
    }

    InstanceKlass* ik_super = InstanceKlass::cast(super_klass);
    ik_super->link_class_impl(throw_verifyerror, CHECK_false);
  }

  // link all interfaces implemented by this class before linking this class
  Array<InstanceKlass*>* interfaces = local_interfaces();
  int num_interfaces = interfaces->length();
  for (int index = 0; index < num_interfaces; index++) {
    InstanceKlass* interk = interfaces->at(index);
    interk->link_class_impl(throw_verifyerror, CHECK_false);
  }


  // If a class declares a method that uses a value class as an argument
  // type or return value type, this value class must be loaded during the
  // linking of this class because size and properties of the value class
  // must be known in order to be able to perform value type optimizations.
  // The implementation below is an approximation of this rule, the code
  // iterates over all methods of the current class (including overridden
  // methods), not only the methods declared by this class. This
  // approximation makes the code simpler, and doesn't change the semantic
  // because classes declaring methods overridden by the current class are
  // linked (and have performed their own pre-loading) before the linking
  // of the current class.
  // This is also the moment to detect potential mismatch between the
  // ValueTypes attribute and the kind of the class effectively loaded.


  // Note:
  // Value class types used for flattenable fields are loaded during
  // the loading phase (see layout ClassFileParser::layout_fields()).
  // Value class types used as element types for array creation
  // are not pre-loaded. Their loading is triggered by either anewarray
  // or multianewarray bytecodes.

  if (has_value_types_attribute()) {
    ResourceMark rm(THREAD);
    for (int i = 0; i < methods()->length(); i++) {
      Method* m = methods()->at(i);
      // Walk each type in the method signature; only object/array entries
      // can name value classes.
      for (SignatureStream ss(m->signature()); !ss.is_done(); ss.next()) {
        Symbol* sig = ss.as_symbol(THREAD);
        if (ss.is_object()) {
          Symbol* symb = sig;
          if (ss.is_array()) {
            // Strip the leading '[' dimensions to get at the element type;
            // a primitive element type cannot be a value class.
            int i=0;
            while (sig->byte_at(i) == '[') i++;
            if (i == sig->utf8_length() - 1 ) continue;  // primitive array
            symb = SymbolTable::lookup(sig->as_C_string() + i + 1,
                                       sig->utf8_length() - 3, CHECK_false);
          }
          if (is_declared_value_type(symb)) {
            oop loader = class_loader();
            oop protection_domain = this->protection_domain();
            Klass* klass = SystemDictionary::resolve_or_fail(symb,
                                                             Handle(THREAD, loader), Handle(THREAD, protection_domain), true,
                                                             CHECK_false);
            if (symb != sig) {
              // Release the temporary element-type symbol created above.
              symb->decrement_refcount();
            }
            if (klass == NULL) {
              THROW_(vmSymbols::java_lang_LinkageError(), false);
            }
            if (!klass->is_value()) {
              // Declared in the ValueTypes attribute but loaded as a
              // non-value class: mismatch.
              THROW_(vmSymbols::java_lang_IncompatibleClassChangeError(), false);
            }
            if (ss.at_return_type()) {
              m->set_is_returning_vt();
            }
          }
        }
      }
    }
  }

  // in case the class is linked in the process of linking its superclasses
  if (is_linked()) {
    return true;
  }

  // trace only the link time for this klass that includes
  // the verification time
  PerfClassTraceTime vmtimer(ClassLoader::perf_class_link_time(),
                             ClassLoader::perf_class_link_selftime(),
                             ClassLoader::perf_classes_linked(),
                             jt->get_thread_stat()->perf_recursion_counts_addr(),
                             jt->get_thread_stat()->perf_timers_addr(),
                             PerfClassTraceTime::CLASS_LINK);

  // verification & rewriting
  {
    HandleMark hm(THREAD);
    Handle h_init_lock(THREAD, init_lock());
    ObjectLocker ol(h_init_lock, THREAD, h_init_lock() != NULL);
    // rewritten will have been set if loader constraint error found
    // on an earlier link attempt
    // don't verify or rewrite if already rewritten
    //

    if (!is_linked()) {
      if (!is_rewritten()) {
        {
          bool verify_ok = verify_code(throw_verifyerror, THREAD);
          if (!verify_ok) {
            return false;
          }
        }

        // Just in case a side-effect of verify linked this class already
        // (which can sometimes happen since the verifier loads classes
        // using custom class loaders, which are free to initialize things)
        if (is_linked()) {
          return true;
        }

        // also sets rewritten
        rewrite_class(CHECK_false);
      } else if (is_shared()) {
        SystemDictionaryShared::check_verification_constraints(this, CHECK_false);
      }

      // relocate jsrs and link methods after they are all rewritten
      link_methods(CHECK_false);

      // Initialize the vtable and interface table after
      // methods have been rewritten since rewrite may
      // fabricate new Method*s.
      // also does loader constraint checking
      //
      // initialize_vtable and initialize_itable need to be rerun for
      // a shared class if the class is not loaded by the NULL classloader.
      ClassLoaderData * loader_data = class_loader_data();
      if (!(is_shared() &&
            loader_data->is_the_null_class_loader_data())) {
        vtable().initialize_vtable(true, CHECK_false);
        itable().initialize_itable(true, CHECK_false);
      }
#ifdef ASSERT
      else {
        vtable().verify(tty, true);
        // In case itable verification is ever added.
        // itable().verify(tty, true);
      }
#endif

      set_init_state(linked);
      if (JvmtiExport::should_post_class_prepare()) {
        Thread *thread = THREAD;
        assert(thread->is_Java_thread(), "thread->is_Java_thread()");
        JvmtiExport::post_class_prepare((JavaThread *) thread, this);
      }
    }
  }
  return true;
}

// Rewrite the byte codes of all of the methods of a class.
// The rewriter must be called exactly once.
// Rewrite the byte codes of all of the methods of a class.
// The rewriter must be called exactly once. Rewriting must happen after
// verification but before the first method of the class is executed.
void InstanceKlass::rewrite_class(TRAPS) {
  assert(is_loaded(), "must be loaded");
  if (is_rewritten()) {
    // Shared (CDS) classes were rewritten at dump time; nothing to do.
    assert(is_shared(), "rewriting an unshared class?");
    return;
  }
  Rewriter::rewrite(this, CHECK);
  set_rewritten();
}

// Now relocate and link method entry points after class is rewritten.
// This is outside is_rewritten flag. In case of an exception, it can be
// executed more than once.
void InstanceKlass::link_methods(TRAPS) {
  int len = methods()->length();
  for (int i = len-1; i >= 0; i--) {
    methodHandle m(THREAD, methods()->at(i));

    // Set up method entry points for compiler and interpreter    .
    m->link_method(m, CHECK);
  }
}

// Eagerly initialize superinterfaces that declare default methods (concrete instance: any access)
// Depth-first over this class's local interfaces; only interfaces that themselves
// DECLARE non-static concrete methods are initialize()d (JLS 12.4.1).
void InstanceKlass::initialize_super_interfaces(TRAPS) {
  assert (has_nonstatic_concrete_methods(), "caller should have checked this");
  for (int i = 0; i < local_interfaces()->length(); ++i) {
    InstanceKlass* ik = local_interfaces()->at(i);

    // Initialization is depth first search ie. we start with top of the inheritance tree
    // has_nonstatic_concrete_methods drives searching superinterfaces since it
    // means has_nonstatic_concrete_methods in its superinterface hierarchy
    if (ik->has_nonstatic_concrete_methods()) {
      ik->initialize_super_interfaces(CHECK);
    }

    // Only initialize() interfaces that "declare" concrete methods.
    if (ik->should_be_initialized() && ik->declares_nonstatic_concrete_methods()) {
      ik->initialize(CHECK);
    }
  }
}

// Class initialization per the JVM spec's multi-step procedure (the "Step N"
// comments below follow that numbering). Acquires the init lock, handles
// concurrent/recursive initializers, initializes supertypes and flattenable
// field classes, runs <clinit>, and publishes the final state (or the error).
void InstanceKlass::initialize_impl(TRAPS) {
  HandleMark hm(THREAD);

  // Make sure klass is linked (verified) before initialization
  // A class could already be verified, since it has been reflected upon.
  link_class(CHECK);

  DTRACE_CLASSINIT_PROBE(required, -1);

  bool wait = false;  // true if we ever waited on another initializer (for DTrace)

  // refer to the JVM book page 47 for description of steps
  // Step 1
  {
    Handle h_init_lock(THREAD, init_lock());
    ObjectLocker ol(h_init_lock, THREAD, h_init_lock() != NULL);

    Thread *self = THREAD; // it's passed the current thread

    // Step 2
    // If we were to use wait() instead of waitInterruptibly() then
    // we might end up throwing IE from link/symbol resolution sites
    // that aren't expected to throw.  This would wreak havoc.  See 6320309.
    while(is_being_initialized() && !is_reentrant_initialization(self)) {
      wait = true;
      ol.waitUninterruptibly(CHECK);
    }

    // Step 3: recursive initialization by this same thread -- complete normally.
    if (is_being_initialized() && is_reentrant_initialization(self)) {
      DTRACE_CLASSINIT_PROBE_WAIT(recursive, -1, wait);
      return;
    }

    // Step 4: another thread finished initialization while we waited.
    if (is_initialized()) {
      DTRACE_CLASSINIT_PROBE_WAIT(concurrent, -1, wait);
      return;
    }

    // Step 5: a previous attempt failed -- throw NoClassDefFoundError.
    if (is_in_error_state()) {
      DTRACE_CLASSINIT_PROBE_WAIT(erroneous, -1, wait);
      ResourceMark rm(THREAD);
      const char* desc = "Could not initialize class ";
      const char* className = external_name();
      size_t msglen = strlen(desc) + strlen(className) + 1;
      char* message = NEW_RESOURCE_ARRAY(char, msglen);
      if (NULL == message) {
        // Out of memory: can't create detailed error message
        THROW_MSG(vmSymbols::java_lang_NoClassDefFoundError(), className);
      } else {
        jio_snprintf(message, msglen, "%s%s", desc, className);
        THROW_MSG(vmSymbols::java_lang_NoClassDefFoundError(), message);
      }
    }

    // Step 6: claim initialization for this thread, then drop the lock.
    set_init_state(being_initialized);
    set_init_thread(self);
  }

  // Step 7
  // Next, if C is a class rather than an interface, initialize it's super class and super
  // interfaces.
  if (!is_interface()) {
    Klass* super_klass = super();
    if (super_klass != NULL && super_klass->should_be_initialized()) {
      // THREAD (not CHECK): a pending exception is handled explicitly below.
      super_klass->initialize(THREAD);
    }
    // If C implements any interface that declares a non-static, concrete method,
    // the initialization of C triggers initialization of its super interfaces.
    // Only need to recurse if has_nonstatic_concrete_methods which includes declaring and
    // having a superinterface that declares, non-static, concrete methods
    if (!HAS_PENDING_EXCEPTION && has_nonstatic_concrete_methods()) {
      initialize_super_interfaces(THREAD);
    }

    // If any exceptions, complete abruptly, throwing the same exception as above.
    if (HAS_PENDING_EXCEPTION) {
      Handle e(THREAD, PENDING_EXCEPTION);
      CLEAR_PENDING_EXCEPTION;
      {
        EXCEPTION_MARK;
        // Locks object, set state, and notify all waiting threads
        set_initialization_state_and_notify(initialization_error, THREAD);
        CLEAR_PENDING_EXCEPTION;
      }
      DTRACE_CLASSINIT_PROBE_WAIT(super__failed, -1, wait);
      THROW_OOP(e());
    }
  }

  // Step 8
  // Initialize classes of flattenable fields
  {
    for (AllFieldStream fs(this); !fs.done(); fs.next()) {
      if (fs.is_flattenable()) {
        InstanceKlass* field_klass = InstanceKlass::cast(this->get_value_field_klass(fs.index()));
        field_klass->initialize(CHECK);
      }
    }
  }


  // Look for aot compiled methods for this klass, including class initializer.
  AOTLoader::load_for_klass(this, THREAD);

  // Step 9: run <clinit> under a perf timer.
  {
    assert(THREAD->is_Java_thread(), "non-JavaThread in initialize_impl");
    JavaThread* jt = (JavaThread*)THREAD;
    DTRACE_CLASSINIT_PROBE_WAIT(clinit, -1, wait);
    // Timer includes any side effects of class initialization (resolution,
    // etc), but not recursive entry into call_class_initializer().
    PerfClassTraceTime timer(ClassLoader::perf_class_init_time(),
                             ClassLoader::perf_class_init_selftime(),
                             ClassLoader::perf_classes_inited(),
                             jt->get_thread_stat()->perf_recursion_counts_addr(),
                             jt->get_thread_stat()->perf_timers_addr(),
                             PerfClassTraceTime::CLASS_CLINIT);
    call_class_initializer(THREAD);
  }

  // Step 10: success -- publish fully_initialized and wake waiters.
  if (!HAS_PENDING_EXCEPTION) {
    set_initialization_state_and_notify(fully_initialized, CHECK);
    {
      debug_only(vtable().verify(tty, true);)
    }
  }
  else {
    // Step 11 and 12: <clinit> threw -- record the error state, then rethrow,
    // wrapping non-Error throwables in ExceptionInInitializerError.
    Handle e(THREAD, PENDING_EXCEPTION);
    CLEAR_PENDING_EXCEPTION;
    // JVMTI has already reported the pending exception
    // JVMTI internal flag reset is needed in order to report ExceptionInInitializerError
    JvmtiExport::clear_detected_exception((JavaThread*)THREAD);
    {
      EXCEPTION_MARK;
      set_initialization_state_and_notify(initialization_error, THREAD);
      CLEAR_PENDING_EXCEPTION;   // ignore any exception thrown, class initialization error is thrown below
      // JVMTI has already reported the pending exception
      // JVMTI internal flag reset is needed in order to report ExceptionInInitializerError
      JvmtiExport::clear_detected_exception((JavaThread*)THREAD);
    }
    DTRACE_CLASSINIT_PROBE_WAIT(error, -1, wait);
    if (e->is_a(SystemDictionary::Error_klass())) {
      THROW_OOP(e());
    } else {
      JavaCallArguments args(e);
      THROW_ARG(vmSymbols::java_lang_ExceptionInInitializerError(),
                vmSymbols::throwable_void_signature(),
                &args);
    }
  }
  DTRACE_CLASSINIT_PROBE_WAIT(end, -1, wait);
}


// Publish a new init state and notify threads blocked on the init lock.
// Once a class is fully initialized the init lock is cleared; after that
// the state must never be set again (hence the always-failing assert below).
void InstanceKlass::set_initialization_state_and_notify(ClassState state, TRAPS) {
  Handle h_init_lock(THREAD, init_lock());
  if (h_init_lock() != NULL) {
    ObjectLocker ol(h_init_lock, THREAD);
    set_init_state(state);
    fence_and_clear_init_lock();
    ol.notify_all(CHECK);
  } else {
    // Reaching here means the init lock was already cleared, i.e. the state
    // was already published once; the assert fires deliberately in debug builds.
    assert(h_init_lock() != NULL, "The initialization state should never be set twice");
    set_init_state(state);
  }
}

// Returns the recorded implementor (see the comment above add_implementor
// for the NULL / single-klass / self encoding). Interfaces only.
Klass* InstanceKlass::implementor() const {
  assert_locked_or_safepoint(Compile_lock);
  Klass** k = adr_implementor();
  if (k == NULL) {
    return NULL;
  } else {
    return *k;
  }
}

// Store the implementor slot (interfaces only). Requires Compile_lock.
void InstanceKlass::set_implementor(Klass* k) {
  assert_lock_strong(Compile_lock);
  assert(is_interface(), "not interface");
  Klass** addr = adr_implementor();
  assert(addr != NULL, "null addr");
  if (addr != NULL) {
    *addr = k;
  }
}

// Returns 0 (none), 1 (exactly one), or 2 (meaning "two or more");
// the embedded slot cannot count beyond one distinct implementor.
int  InstanceKlass::nof_implementors() const {
  assert_lock_strong(Compile_lock);
  Klass* k = implementor();
  if (k == NULL) {
    return 0;
  } else if (k != this) {
    return 1;
  } else {
    return 2;
  }
}

// The embedded _implementor field can only record one implementor.
// When there are more than one implementors, the _implementor field
// is set to the interface Klass* itself. Following are the possible
// values for the _implementor field:
//   NULL                  - no implementor
//   implementor Klass*    - one implementor
//   self                  - more than one implementor
//
// The _implementor field only exists for interfaces.
void InstanceKlass::add_implementor(Klass* k) {
  assert_lock_strong(Compile_lock);
  assert(is_interface(), "not interface");
  // Filter out my subinterfaces.
  // (Note: Interfaces are never on the subklass list.)
  if (InstanceKlass::cast(k)->is_interface()) return;

  // Filter out subclasses whose supers already implement me.
  // (Note: CHA must walk subclasses of direct implementors
  // in order to locate indirect implementors.)
  Klass* sk = k->super();
  if (sk != NULL && InstanceKlass::cast(sk)->implements_interface(this))
    // We only need to check one immediate superclass, since the
    // implements_interface query looks at transitive_interfaces.
    // Any supers of the super have the same (or fewer) transitive_interfaces.
    return;

  Klass* ik = implementor();
  if (ik == NULL) {
    set_implementor(k);
  } else if (ik != this) {
    // There is already an implementor. Use itself as an indicator of
    // more than one implementors.
    set_implementor(this);
  }

  // The implementor also implements the transitive_interfaces
  for (int index = 0; index < local_interfaces()->length(); index++) {
    InstanceKlass::cast(local_interfaces()->at(index))->add_implementor(k);
  }
}

// Reset the implementor slot to "no implementor" (interfaces only).
void InstanceKlass::init_implementor() {
  if (is_interface()) {
    set_implementor(NULL);
  }
}


// Register this class as an implementor of each of its local interfaces.
void InstanceKlass::process_interfaces(Thread *thread) {
  // link this class into the implementors list of every interface it implements
  for (int i = local_interfaces()->length() - 1; i >= 0; i--) {
    assert(local_interfaces()->at(i)->is_klass(), "must be a klass");
    InstanceKlass* interf = InstanceKlass::cast(local_interfaces()->at(i));
    assert(interf->is_interface(), "expected interface");
    interf->add_implementor(this);
  }
}

// Interfaces can never sit on the primary super chain.
bool InstanceKlass::can_be_primary_super_slow() const {
  if (is_interface())
    return false;
  else
    return Klass::can_be_primary_super_slow();
}

// Build (or share) the secondary supers list from the transitive interfaces.
// Returns NULL when set_secondary_supers was done directly; otherwise returns
// a growable array for the caller to finish (extra slots case).
GrowableArray<Klass*>* InstanceKlass::compute_secondary_supers(int num_extra_slots,
                                                               Array<InstanceKlass*>* transitive_interfaces) {
  // The secondaries are the implemented interfaces.
  Array<InstanceKlass*>* interfaces = transitive_interfaces;
  int num_secondaries = num_extra_slots + interfaces->length();
  if (num_secondaries == 0) {
    // Must share this for correct bootstrapping!
    set_secondary_supers(Universe::the_empty_klass_array());
    return NULL;
  } else if (num_extra_slots == 0) {
    // The secondary super list is exactly the same as the transitive interfaces, so
    // let's use it instead of making a copy.
    // Redefine classes has to be careful not to delete this!
    // We need the cast because Array<Klass*> is NOT a supertype of Array<InstanceKlass*>,
    // (but it's safe to do here because we won't write into _secondary_supers from this point on).
    set_secondary_supers((Array<Klass*>*)(address)interfaces);
    return NULL;
  } else {
    // Copy transitive interfaces to a temporary growable array to be constructed
    // into the secondary super list with extra slots.
    GrowableArray<Klass*>* secondaries = new GrowableArray<Klass*>(interfaces->length());
    for (int i = 0; i < interfaces->length(); i++) {
      secondaries->push(interfaces->at(i));
    }
    return secondaries;
  }
}

// Subtype test: interfaces via implements_interface, classes via the
// generic Klass implementation.
bool InstanceKlass::compute_is_subtype_of(Klass* k) {
  if (k->is_interface()) {
    return implements_interface(k);
  } else {
    return Klass::compute_is_subtype_of(k);
  }
}

// True if k is this class or appears anywhere in the transitive interfaces.
bool InstanceKlass::implements_interface(Klass* k) const {
  if (this == k) return true;
  assert(k->is_interface(), "should be an interface class");
  for (int i = 0; i < transitive_interfaces()->length(); i++) {
    if (transitive_interfaces()->at(i) == k) {
      return true;
    }
  }
  return false;
}

// True if k is this class or one of its DIRECT superinterfaces only.
bool InstanceKlass::is_same_or_direct_interface(Klass *k) const {
  // Verify direct super interface
  if (this == k) return true;
  assert(k->is_interface(), "should be an interface class");
  for (int i = 0; i < local_interfaces()->length(); i++) {
    if (local_interfaces()->at(i) == k) {
      return true;
    }
  }
  return false;
}

// Allocate an object array whose element type is the n-dimensional array of
// this class. Throws NegativeArraySizeException / OutOfMemoryError on bad sizes.
objArrayOop InstanceKlass::allocate_objArray(int n, int length, TRAPS) {
  if (length < 0)  {
    THROW_MSG_0(vmSymbols::java_lang_NegativeArraySizeException(), err_msg("%d", length));
  }
  if (length > arrayOopDesc::max_array_length(T_OBJECT)) {
    report_java_out_of_memory("Requested array size exceeds VM limit");
    JvmtiExport::post_array_size_exhausted();
    THROW_OOP_0(Universe::out_of_memory_error_array_size());
  }
  int size = objArrayOopDesc::object_size(length);
  Klass* ak = array_klass(n, CHECK_NULL);
  objArrayOop o = (objArrayOop)Universe::heap()->array_allocate(ak, size, length,
                                                                /* do_zero */ true, CHECK_NULL);
  return o;
}

// Register a newly allocated instance with the finalizer queue by calling
// java.lang.ref.Finalizer.register(). Returns the (possibly moved) instance.
instanceOop InstanceKlass::register_finalizer(instanceOop i, TRAPS) {
  if (TraceFinalizerRegistration) {
    tty->print("Registered ");
    i->print_value_on(tty);
    tty->print_cr(" (" INTPTR_FORMAT ") as finalizable", p2i(i));
  }
  instanceHandle h_i(THREAD, i);
  // Pass the handle as argument, JavaCalls::call expects oop as jobjects
  JavaValue result(T_VOID);
  JavaCallArguments args(h_i);
  methodHandle mh (THREAD, Universe::finalizer_register_method());
  JavaCalls::call(&result, mh, &args, CHECK_NULL);
  return h_i();
}

// Allocate a new instance of this class, registering it for finalization
// when required (unless registration is deferred to Object.<init>).
instanceOop InstanceKlass::allocate_instance(TRAPS) {
  bool has_finalizer_flag = has_finalizer(); // Query before possible GC
  int size = size_helper();  // Query before forming handle.

  instanceOop i;

  i = (instanceOop)Universe::heap()->obj_allocate(this, size, CHECK_NULL);
  if (has_finalizer_flag && !RegisterFinalizersAtInit) {
    i = register_finalizer(i, CHECK_NULL);
  }
  return i;
}

// Handle-returning convenience wrapper around allocate_instance.
instanceHandle InstanceKlass::allocate_instance_handle(TRAPS) {
  return instanceHandle(THREAD, allocate_instance(THREAD));
}

// Throw Instantiation/IllegalAccess error-or-exception (per throwError) if
// this class may not be instantiated (interface, abstract, or java.lang.Class).
void InstanceKlass::check_valid_for_instantiation(bool throwError, TRAPS) {
  if (is_interface() || is_abstract()) {
    ResourceMark rm(THREAD);
    THROW_MSG(throwError ? vmSymbols::java_lang_InstantiationError()
              : vmSymbols::java_lang_InstantiationException(), external_name());
  }
  if (this == SystemDictionary::Class_klass()) {
    ResourceMark rm(THREAD);
    THROW_MSG(throwError ? vmSymbols::java_lang_IllegalAccessError()
              : vmSymbols::java_lang_IllegalAccessException(), external_name());
  }
}

// Return (or lazily create) the n-dimensional array klass for this class.
// or_null selects lookup-only behavior (no allocation, may return NULL).
Klass* InstanceKlass::array_klass_impl(bool or_null, int n, TRAPS) {
  // Need load-acquire for lock-free read
  if (array_klasses_acquire() == NULL) {
    if (or_null) return NULL;

    ResourceMark rm;
    JavaThread *jt = (JavaThread *)THREAD;
    {
      // Atomic creation of array_klasses
      MutexLocker mc(Compile_lock, THREAD);   // for vtables
      MutexLocker ma(MultiArray_lock, THREAD);

      // Check if update has already taken place
      if (array_klasses() == NULL) {
        Klass*    k = ObjArrayKlass::allocate_objArray_klass(class_loader_data(), 1, this, CHECK_NULL);
        // use 'release' to pair with lock-free load
        release_set_array_klasses(k);
      }
    }
  }
  // _this will always be set at this point
  ObjArrayKlass* oak = (ObjArrayKlass*)array_klasses();
  if (or_null) {
    return oak->array_klass_or_null(n);
  }
  return oak->array_klass(n, THREAD);
}

// One-dimensional convenience overload.
Klass* InstanceKlass::array_klass_impl(bool or_null, TRAPS) {
  return array_klass_impl(or_null, 1, THREAD);
}

static int call_class_initializer_counter = 0;   // for debugging

// Return this class's <clinit> method, or NULL if there is none (or its
// flags are not valid for an initializer).
Method* InstanceKlass::class_initializer() const {
  Method* clinit = find_method(
      vmSymbols::class_initializer_name(), vmSymbols::void_method_signature());
  if (clinit != NULL && clinit->has_valid_initializer_flags()) {
    return clinit;
  }
  return NULL;
}

// Invoke <clinit> (if any) as a static Java call; any exception it throws
// is left pending for the caller (initialize_impl) to handle.
void InstanceKlass::call_class_initializer(TRAPS) {
  if (ReplayCompiles &&
      (ReplaySuppressInitializers == 1 ||
       (ReplaySuppressInitializers >= 2 && class_loader() != NULL))) {
    // Hide the existence of the initializer for the purpose of replaying the compile
    return;
  }

  methodHandle h_method(THREAD, class_initializer());
  assert(!is_initialized(), "we cannot initialize twice");
  LogTarget(Info, class, init) lt;
  if (lt.is_enabled()) {
    ResourceMark rm;
    LogStream ls(lt);
    ls.print("%d Initializing ", call_class_initializer_counter++);
    name()->print_value_on(&ls);
    ls.print_cr("%s (" INTPTR_FORMAT ")", h_method() == NULL ? "(no method)" : "", p2i(this));
  }
  if (h_method() != NULL) {
    JavaCallArguments args; // No arguments
    JavaValue result(T_VOID);
    JavaCalls::call(&result, h_method, &args, CHECK); // Static call (no args)
  }
}


// Fill entry_for with the interpreter oop map for (method, bci), creating the
// per-klass OopMapCache on first use via double-checked locking.
void InstanceKlass::mask_for(const methodHandle& method, int bci,
  InterpreterOopMap* entry_for) {
  // Lazily create the _oop_map_cache at first request
  // Lock-free access requires load_acquire.
  OopMapCache* oop_map_cache = OrderAccess::load_acquire(&_oop_map_cache);
  if (oop_map_cache == NULL) {
    MutexLockerEx x(OopMapCacheAlloc_lock,  Mutex::_no_safepoint_check_flag);
    // Check if _oop_map_cache was allocated while we were waiting for this lock
    if ((oop_map_cache = _oop_map_cache) == NULL) {
      oop_map_cache = new OopMapCache();
      // Ensure _oop_map_cache is stable, since it is examined without a lock
      OrderAccess::release_store(&_oop_map_cache, oop_map_cache);
    }
  }
  // _oop_map_cache is constant after init; lookup below does its own locking.
  oop_map_cache->lookup(method, bci, entry_for);
}


// Find a field declared by THIS class only (no supertypes) matching
// name/signature; fills *fd and returns true on success.
bool InstanceKlass::find_local_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
  for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
    Symbol* f_name = fs.name();
    Symbol* f_sig  = fs.signature();
    if (f_name == name && f_sig == sig) {
      fd->reinitialize(const_cast<InstanceKlass*>(this), fs.index());
      return true;
    }
  }
  return false;
}


// Recursively search this class's local interfaces (and their superinterfaces)
// for a matching field; returns the interface that declares it, or NULL.
Klass* InstanceKlass::find_interface_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
  const int n = local_interfaces()->length();
  for (int i = 0; i < n; i++) {
    Klass* intf1 = local_interfaces()->at(i);
    assert(intf1->is_interface(), "just checking type");
    // search for field in current interface
    if (InstanceKlass::cast(intf1)->find_local_field(name, sig, fd)) {
      assert(fd->is_static(), "interface field must be static");
      return intf1;
    }
    // search for field in direct superinterfaces
    Klass* intf2 = InstanceKlass::cast(intf1)->find_interface_field(name, sig, fd);
    if (intf2 != NULL) return intf2;
  }
  // otherwise field lookup fails
  return NULL;
}


// Full field lookup per JVMS field resolution order: this class, then
// superinterfaces (recursively), then the superclass chain.
Klass* InstanceKlass::find_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
  // search order according to newest JVM spec (5.4.3.2, p.167).
  // 1) search for field in current klass
  if (find_local_field(name, sig, fd)) {
    return const_cast<InstanceKlass*>(this);
  }
  // 2) search for field recursively in direct superinterfaces
  { Klass* intf = find_interface_field(name, sig, fd);
    if (intf != NULL) return intf;
  }
  // 3) apply field lookup recursively if superclass exists
  { Klass* supr = super();
    if (supr != NULL) return InstanceKlass::cast(supr)->find_field(name, sig, fd);
  }
  // 4) otherwise field lookup fails
  return NULL;
}


// As above, but additionally constrained by the field's staticness;
// interfaces are only searched when a static field is wanted.
Klass* InstanceKlass::find_field(Symbol* name, Symbol* sig, bool is_static, fieldDescriptor* fd) const {
  // search order according to newest JVM spec (5.4.3.2, p.167).
  // 1) search for field in current klass
  if (find_local_field(name, sig, fd)) {
    if (fd->is_static() == is_static) return const_cast<InstanceKlass*>(this);
  }
  // 2) search for field recursively in direct superinterfaces
  if (is_static) {
    Klass* intf = find_interface_field(name, sig, fd);
    if (intf != NULL) return intf;
  }
  // 3) apply field lookup recursively if superclass exists
  { Klass* supr = super();
    if (supr != NULL) return InstanceKlass::cast(supr)->find_field(name, sig, is_static, fd);
  }
  // 4) otherwise field lookup fails
  return NULL;
}


// Find a locally declared field by byte offset and staticness; fills *fd.
bool InstanceKlass::find_local_field_from_offset(int offset, bool is_static, fieldDescriptor* fd) const {
  for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
    if (fs.offset() == offset) {
      fd->reinitialize(const_cast<InstanceKlass*>(this), fs.index());
      if (fd->is_static() == is_static) return true;
    }
  }
  return false;
}


// Walk the superclass chain looking for a field at the given offset.
bool InstanceKlass::find_field_from_offset(int offset, bool is_static, fieldDescriptor* fd) const {
  Klass* klass = const_cast<InstanceKlass*>(this);
  while (klass != NULL) {
    if (InstanceKlass::cast(klass)->find_local_field_from_offset(offset, is_static, fd)) {
      return true;
    }
    klass = klass->super();
  }
  return false;
}


// Apply f to every local method. Safe to call on partially set up klasses.
void InstanceKlass::methods_do(void f(Method* method)) {
  // Methods aren't stable until they are loaded.  This can be read outside
  // a lock through the ClassLoaderData for profiling
  if (!is_loaded()) {
    return;
  }

  int len = methods()->length();
  for (int index = 0; index < len; index++) {
    Method* m = methods()->at(index);
    assert(m->is_method(), "must be method");
    f(m);
  }
}


// Apply cl to every locally declared static field.
void InstanceKlass::do_local_static_fields(FieldClosure* cl) {
  for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
    if (fs.access_flags().is_static()) {
      fieldDescriptor& fd = fs.field_descriptor();
      cl->do_field(&fd);
    }
  }
}


// Apply f(fd, mirror) to every locally declared static field; f may throw.
void InstanceKlass::do_local_static_fields(void f(fieldDescriptor*, Handle, TRAPS), Handle mirror, TRAPS) {
  for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
    if (fs.access_flags().is_static()) {
      fieldDescriptor& fd = fs.field_descriptor();
      f(&fd, mirror, CHECK);
    }
  }
}


// qsort comparator over (offset, index) int pairs -- sorts by offset.
static int compare_fields_by_offset(int* a, int* b) {
  return a[0] - b[0];
}

// Apply cl to all nonstatic fields, superclass fields first, this class's
// fields sorted by offset (debug info expects offset order).
void InstanceKlass::do_nonstatic_fields(FieldClosure* cl) {
  InstanceKlass* super = superklass();
  if (super != NULL) {
    super->do_nonstatic_fields(cl);
  }
  fieldDescriptor fd;
  int length = java_fields_count();
  // In DebugInfo nonstatic fields are sorted by offset.
  int* fields_sorted = NEW_C_HEAP_ARRAY(int, 2*(length+1), mtClass);
  int j = 0;
  for (int i = 0; i < length; i += 1) {
    fd.reinitialize(this, i);
    if (!fd.is_static()) {
      fields_sorted[j + 0] = fd.offset();
      fields_sorted[j + 1] = i;
      j += 2;
    }
  }
  if (j > 0) {
    length = j;
    // _sort_Fn is defined in growableArray.hpp.
    qsort(fields_sorted, length/2, 2*sizeof(int), (_sort_Fn)compare_fields_by_offset);
    for (int i = 0; i < length; i += 2) {
      fd.reinitialize(this, fields_sorted[i + 1]);
      assert(!fd.is_static() && fd.offset() == fields_sorted[i], "only nonstatic fields");
      cl->do_field(&fd);
    }
  }
  FREE_C_HEAP_ARRAY(int, fields_sorted);
}


// Apply f (may throw) to every array klass derived from this class.
void InstanceKlass::array_klasses_do(void f(Klass* k, TRAPS), TRAPS) {
  if (array_klasses() != NULL)
    ArrayKlass::cast(array_klasses())->array_klasses_do(f, THREAD);
}

// Apply f to every array klass derived from this class.
void InstanceKlass::array_klasses_do(void f(Klass* k)) {
  if (array_klasses() != NULL)
    ArrayKlass::cast(array_klasses())->array_klasses_do(f);
}

#ifdef ASSERT
// Debug-only linear lookup used to cross-check binary_search results.
static int linear_search(const Array<Method*>* methods,
                         const Symbol* name,
                         const Symbol* signature) {
  const int len = methods->length();
  for (int index = 0; index < len; index++) {
    const Method* const m = methods->at(index);
    assert(m->is_method(), "must be method");
    if (m->signature() == signature && m->name() == name) {
      return index;
    }
  }
  return -1;
}
#endif

// Binary search over the (name-sorted) methods array; returns an index of
// SOME method with the given name (not necessarily the first), or -1.
static int binary_search(const Array<Method*>* methods, const Symbol* name) {
  int len = methods->length();
  // methods are sorted, so do binary search
  int l = 0;
  int h = len - 1;
  while (l <= h) {
    int mid = (l + h) >> 1;
    Method* m = methods->at(mid);
    assert(m->is_method(), "must be method");
    int res = m->name()->fast_compare(name);
    if (res == 0) {
      return mid;
    } else if (res < 0) {
      l = mid + 1;
    } else {
      h = mid - 1;
    }
  }
  return -1;
}

// find_method looks up the name/signature in the local methods array
Method* InstanceKlass::find_method(const Symbol* name,
                                   const Symbol* signature) const {
  return find_method_impl(name, signature, find_overpass, find_static, find_private);
}

// Instance-method forwarding to the static lookup over this class's methods().
Method* InstanceKlass::find_method_impl(const Symbol* name,
                                        const Symbol* signature,
                                        OverpassLookupMode overpass_mode,
                                        StaticLookupMode static_mode,
                                        PrivateLookupMode private_mode) const {
  return InstanceKlass::find_method_impl(methods(),
                                         name,
                                         signature,
                                         overpass_mode,
                                         static_mode,
                                         private_mode);
}

// find_instance_method looks up the name/signature in the local methods array
// and skips over static methods
Method* InstanceKlass::find_instance_method(const Array<Method*>* methods,
                                            const Symbol* name,
                                            const Symbol* signature,
                                            PrivateLookupMode private_mode) {
  Method* const meth = InstanceKlass::find_method_impl(methods,
                                                       name,
                                                       signature,
                                                       find_overpass,
                                                       skip_static,
                                                       private_mode);
  assert(((meth == NULL) || !meth->is_static()),
    "find_instance_method should have skipped statics");
  return meth;
}

// find_instance_method looks up the name/signature in the local methods array
// and skips over static methods
Method* InstanceKlass::find_instance_method(const Symbol* name,
                                            const Symbol* signature,
                                            PrivateLookupMode private_mode) const {
  return InstanceKlass::find_instance_method(methods(), name, signature, private_mode);
}
same name/signature 1741 Method* InstanceKlass::find_local_method(const Symbol* name, 1742 const Symbol* signature, 1743 OverpassLookupMode overpass_mode, 1744 StaticLookupMode static_mode, 1745 PrivateLookupMode private_mode) const { 1746 return InstanceKlass::find_method_impl(methods(), 1747 name, 1748 signature, 1749 overpass_mode, 1750 static_mode, 1751 private_mode); 1752 } 1753 1754 // Find looks up the name/signature in the local methods array 1755 // and filters on the overpass, static and private flags 1756 // This returns the first one found 1757 // note that the local methods array can have up to one overpass, one static 1758 // and one instance (private or not) with the same name/signature 1759 Method* InstanceKlass::find_local_method(const Array<Method*>* methods, 1760 const Symbol* name, 1761 const Symbol* signature, 1762 OverpassLookupMode overpass_mode, 1763 StaticLookupMode static_mode, 1764 PrivateLookupMode private_mode) { 1765 return InstanceKlass::find_method_impl(methods, 1766 name, 1767 signature, 1768 overpass_mode, 1769 static_mode, 1770 private_mode); 1771 } 1772 1773 Method* InstanceKlass::find_method(const Array<Method*>* methods, 1774 const Symbol* name, 1775 const Symbol* signature) { 1776 return InstanceKlass::find_method_impl(methods, 1777 name, 1778 signature, 1779 find_overpass, 1780 find_static, 1781 find_private); 1782 } 1783 1784 Method* InstanceKlass::find_method_impl(const Array<Method*>* methods, 1785 const Symbol* name, 1786 const Symbol* signature, 1787 OverpassLookupMode overpass_mode, 1788 StaticLookupMode static_mode, 1789 PrivateLookupMode private_mode) { 1790 int hit = find_method_index(methods, name, signature, overpass_mode, static_mode, private_mode); 1791 return hit >= 0 ? methods->at(hit): NULL; 1792 } 1793 1794 // true if method matches signature and conforms to skipping_X conditions. 
static bool method_matches(const Method* m,
                           const Symbol* signature,
                           bool skipping_overpass,
                           bool skipping_static,
                           bool skipping_private) {
  return ((m->signature() == signature) &&
          (!skipping_overpass || !m->is_overpass()) &&
          (!skipping_static || !m->is_static()) &&
          (!skipping_private || !m->is_private()));
}

// Used directly for default_methods to find the index into the
// default_vtable_indices, and indirectly by find_method
// find_method_index looks in the local methods array to return the index
// of the matching name/signature. If, overpass methods are being ignored,
// the search continues to find a potential non-overpass match.  This capability
// is important during method resolution to prefer a static method, for example,
// over an overpass method.
// There is the possibility in any _method's array to have the same name/signature
// for a static method, an overpass method and a local instance method
// To correctly catch a given method, the search criteria may need
// to explicitly skip the other two. For local instance methods, it
// is often necessary to skip private methods
int InstanceKlass::find_method_index(const Array<Method*>* methods,
                                     const Symbol* name,
                                     const Symbol* signature,
                                     OverpassLookupMode overpass_mode,
                                     StaticLookupMode static_mode,
                                     PrivateLookupMode private_mode) {
  const bool skipping_overpass = (overpass_mode == skip_overpass);
  const bool skipping_static = (static_mode == skip_static);
  const bool skipping_private = (private_mode == skip_private);
  // methods array is sorted by name, so a binary search lands on *some*
  // method with the right name; overloads cluster around that hit.
  const int hit = binary_search(methods, name);
  if (hit != -1) {
    const Method* const m = methods->at(hit);

    // Do linear search to find matching signature.  First, quick check
    // for common case, ignoring overpasses if requested.
    if (method_matches(m, signature, skipping_overpass, skipping_static, skipping_private)) {
      return hit;
    }

    // search downwards through overloaded methods
    int i;
    for (i = hit - 1; i >= 0; --i) {
      const Method* const m = methods->at(i);
      assert(m->is_method(), "must be method");
      if (m->name() != name) {
        break;
      }
      if (method_matches(m, signature, skipping_overpass, skipping_static, skipping_private)) {
        return i;
      }
    }
    // search upwards
    for (i = hit + 1; i < methods->length(); ++i) {
      const Method* const m = methods->at(i);
      assert(m->is_method(), "must be method");
      if (m->name() != name) {
        break;
      }
      if (method_matches(m, signature, skipping_overpass, skipping_static, skipping_private)) {
        return i;
      }
    }
    // not found
#ifdef ASSERT
    // When no entries are being skipped, a full linear scan must agree
    // with the (negative) binary-search-based result above.
    const int index = (skipping_overpass || skipping_static || skipping_private) ? -1 :
      linear_search(methods, name, signature);
    assert(-1 == index, "binary search should have found entry %d", index);
#endif
  }
  return -1;
}

int InstanceKlass::find_method_by_name(const Symbol* name, int* end) const {
  return find_method_by_name(methods(), name, end);
}

// Returns the index of the first method with the given name and stores the
// exclusive end of the run of same-named methods in *end_ptr, i.e. the
// matching methods occupy [start, *end_ptr).  Returns -1 if none match.
int InstanceKlass::find_method_by_name(const Array<Method*>* methods,
                                       const Symbol* name,
                                       int* end_ptr) {
  assert(end_ptr != NULL, "just checking");
  int start = binary_search(methods, name);
  int end = start + 1;
  if (start != -1) {
    // widen the hit to cover the whole run of equal names
    while (start - 1 >= 0 && (methods->at(start - 1))->name() == name) --start;
    while (end < methods->length() && (methods->at(end))->name() == name) ++end;
    *end_ptr = end;
    return start;
  }
  return -1;
}

// uncached_lookup_method searches both the local class methods array and all
// superclasses methods arrays, skipping any overpass methods in superclasses,
// and possibly skipping private methods.
Method* InstanceKlass::uncached_lookup_method(const Symbol* name,
                                              const Symbol* signature,
                                              OverpassLookupMode overpass_mode,
                                              PrivateLookupMode private_mode) const {
  OverpassLookupMode overpass_local_mode = overpass_mode;
  const Klass* klass = this;
  while (klass != NULL) {
    Method* const method = InstanceKlass::cast(klass)->find_method_impl(name,
                                                                        signature,
                                                                        overpass_local_mode,
                                                                        find_static,
                                                                        private_mode);
    if (method != NULL) {
      return method;
    }
    klass = klass->super();
    overpass_local_mode = skip_overpass;   // Always ignore overpass methods in superclasses
  }
  return NULL;
}

#ifdef ASSERT
// search through class hierarchy and return true if this class or
// one of the superclasses was redefined
bool InstanceKlass::has_redefined_this_or_super() const {
  const Klass* klass = this;
  while (klass != NULL) {
    if (InstanceKlass::cast(klass)->has_been_redefined()) {
      return true;
    }
    klass = klass->super();
  }
  return false;
}
#endif

// lookup a method in the default methods list then in all transitive interfaces
// Do NOT return private or static methods
Method* InstanceKlass::lookup_method_in_ordered_interfaces(Symbol* name,
                                                           Symbol* signature) const {
  Method* m = NULL;
  if (default_methods() != NULL) {
    m = find_method(default_methods(), name, signature);
  }
  // Look up interfaces
  if (m == NULL) {
    m = lookup_method_in_all_interfaces(name, signature, find_defaults);
  }
  return m;
}

// lookup a method in all the interfaces that this class implements
// Do NOT return private or static methods, new in JDK8 which are not externally visible
// They should only be found in the initial InterfaceMethodRef
Method* InstanceKlass::lookup_method_in_all_interfaces(Symbol* name,
                                                       Symbol* signature,
                                                       DefaultsLookupMode defaults_mode) const {
  Array<InstanceKlass*>* all_ifs = transitive_interfaces();
  int num_ifs = all_ifs->length();
  InstanceKlass *ik = NULL;
  for (int i = 0; i < num_ifs; i++) {
    ik = all_ifs->at(i);
    Method* m = ik->lookup_method(name, signature);
    // only public, non-static methods; optionally skip default methods
    if (m != NULL && m->is_public() && !m->is_static() &&
        ((defaults_mode != skip_defaults) || !m->is_default_method())) {
      return m;
    }
  }
  return NULL;
}

/* jni_id_for_impl for jfieldIds only */
JNIid* InstanceKlass::jni_id_for_impl(int offset) {
  MutexLocker ml(JfieldIdCreation_lock);
  // Retry lookup after we got the lock
  JNIid* probe = jni_ids() == NULL ? NULL : jni_ids()->find(offset);
  if (probe == NULL) {
    // Slow case, allocate new static field identifier
    probe = new JNIid(this, offset, jni_ids());
    set_jni_ids(probe);
  }
  return probe;
}


/* jni_id_for for jfieldIds only */
JNIid* InstanceKlass::jni_id_for(int offset) {
  // Fast path: lock-free lookup; fall back to the locked slow path on miss.
  JNIid* probe = jni_ids() == NULL ? NULL : jni_ids()->find(offset);
  if (probe == NULL) {
    probe = jni_id_for_impl(offset);
  }
  return probe;
}

// Reads one u2 of the EnclosingMethod data stored at the tail of the
// inner_classes array; returns 0 if no EnclosingMethod data is present.
u2 InstanceKlass::enclosing_method_data(int offset) const {
  const Array<jushort>* const inner_class_list = inner_classes();
  if (inner_class_list == NULL) {
    return 0;
  }
  const int length = inner_class_list->length();
  // a length that is an exact multiple of inner_class_next_offset means
  // there is no enclosing-method suffix appended to the list
  if (length % inner_class_next_offset == 0) {
    return 0;
  }
  const int index = length - enclosing_method_attribute_size;
  assert(offset < enclosing_method_attribute_size, "invalid offset");
  return inner_class_list->at(index + offset);
}

void InstanceKlass::set_enclosing_method_indices(u2 class_index,
                                                 u2 method_index) {
  Array<jushort>* inner_class_list = inner_classes();
  assert (inner_class_list != NULL, "_inner_classes list is not set up");
  int length = inner_class_list->length();
  // only write if the list actually carries the enclosing-method suffix
  if (length % inner_class_next_offset == enclosing_method_attribute_size) {
    int index = length - enclosing_method_attribute_size;
    inner_class_list->at_put(
      index + enclosing_method_class_index_offset, class_index);
    inner_class_list->at_put(
      index + enclosing_method_method_index_offset, method_index);
  }
}

// Lookup or create a jmethodID.
// This code is called by the VMThread and JavaThreads so the
// locking has to be done very carefully to avoid deadlocks
// and/or other cache consistency problems.
//
jmethodID InstanceKlass::get_jmethod_id(const methodHandle& method_h) {
  size_t idnum = (size_t)method_h->method_idnum();
  jmethodID* jmeths = methods_jmethod_ids_acquire();
  size_t length = 0;
  jmethodID id = NULL;

  // We use a double-check locking idiom here because this cache is
  // performance sensitive. In the normal system, this cache only
  // transitions from NULL to non-NULL which is safe because we use
  // release_set_methods_jmethod_ids() to advertise the new cache.
  // A partially constructed cache should never be seen by a racing
  // thread. We also use release_store() to save a new jmethodID
  // in the cache so a partially constructed jmethodID should never be
  // seen either. Cache reads of existing jmethodIDs proceed without a
  // lock, but cache writes of a new jmethodID requires uniqueness and
  // creation of the cache itself requires no leaks so a lock is
  // generally acquired in those two cases.
  //
  // If the RedefineClasses() API has been used, then this cache can
  // grow and we'll have transitions from non-NULL to bigger non-NULL.
  // Cache creation requires no leaks and we require safety between all
  // cache accesses and freeing of the old cache so a lock is generally
  // acquired when the RedefineClasses() API has been used.

  if (jmeths != NULL) {
    // the cache already exists
    if (!idnum_can_increment()) {
      // the cache can't grow so we can just get the current values
      get_jmethod_id_length_value(jmeths, idnum, &length, &id);
    } else {
      // cache can grow so we have to be more careful
      if (Threads::number_of_threads() == 0 ||
          SafepointSynchronize::is_at_safepoint()) {
        // we're single threaded or at a safepoint - no locking needed
        get_jmethod_id_length_value(jmeths, idnum, &length, &id);
      } else {
        MutexLocker ml(JmethodIdCreation_lock);
        get_jmethod_id_length_value(jmeths, idnum, &length, &id);
      }
    }
  }
  // implied else:
  // we need to allocate a cache so default length and id values are good

  if (jmeths == NULL ||   // no cache yet
      length <= idnum ||  // cache is too short
      id == NULL) {       // cache doesn't contain entry

    // This function can be called by the VMThread so we have to do all
    // things that might block on a safepoint before grabbing the lock.
    // Otherwise, we can deadlock with the VMThread or have a cache
    // consistency issue. These vars keep track of what we might have
    // to free after the lock is dropped.
    jmethodID to_dealloc_id = NULL;
    jmethodID* to_dealloc_jmeths = NULL;

    // may not allocate new_jmeths or use it if we allocate it
    jmethodID* new_jmeths = NULL;
    if (length <= idnum) {
      // allocate a new cache that might be used
      size_t size = MAX2(idnum+1, (size_t)idnum_allocated_count());
      new_jmeths = NEW_C_HEAP_ARRAY(jmethodID, size+1, mtClass);
      memset(new_jmeths, 0, (size+1)*sizeof(jmethodID));
      // cache size is stored in element[0], other elements offset by one
      new_jmeths[0] = (jmethodID)size;
    }

    // allocate a new jmethodID that might be used
    jmethodID new_id = NULL;
    if (method_h->is_old() && !method_h->is_obsolete()) {
      // The method passed in is old (but not obsolete), we need to use the current version
      Method* current_method = method_with_idnum((int)idnum);
      assert(current_method != NULL, "old and but not obsolete, so should exist");
      new_id = Method::make_jmethod_id(class_loader_data(), current_method);
    } else {
      // It is the current version of the method or an obsolete method,
      // use the version passed in
      new_id = Method::make_jmethod_id(class_loader_data(), method_h());
    }

    if (Threads::number_of_threads() == 0 ||
        SafepointSynchronize::is_at_safepoint()) {
      // we're single threaded or at a safepoint - no locking needed
      id = get_jmethod_id_fetch_or_update(idnum, new_id, new_jmeths,
                                          &to_dealloc_id, &to_dealloc_jmeths);
    } else {
      MutexLocker ml(JmethodIdCreation_lock);
      id = get_jmethod_id_fetch_or_update(idnum, new_id, new_jmeths,
                                          &to_dealloc_id, &to_dealloc_jmeths);
    }

    // The lock has been dropped so we can free resources.
    // Free up either the old cache or the new cache if we allocated one.
    if (to_dealloc_jmeths != NULL) {
      FreeHeap(to_dealloc_jmeths);
    }
    // free up the new ID since it wasn't needed
    if (to_dealloc_id != NULL) {
      Method::destroy_jmethod_id(class_loader_data(), to_dealloc_id);
    }
  }
  return id;
}

// Figure out how many jmethodIDs haven't been allocated, and make
// sure space for them is pre-allocated. This makes getting all
// method ids much, much faster with classes with more than 8
// methods, and has a *substantial* effect on performance with jvmti
// code that loads all jmethodIDs for all classes.
void InstanceKlass::ensure_space_for_methodids(int start_offset) {
  int new_jmeths = 0;
  int length = methods()->length();
  for (int index = start_offset; index < length; index++) {
    Method* m = methods()->at(index);
    jmethodID id = m->find_jmethod_id_or_null();
    if (id == NULL) {
      new_jmeths++;
    }
  }
  if (new_jmeths != 0) {
    Method::ensure_jmethod_ids(class_loader_data(), new_jmeths);
  }
}

// Common code to fetch the jmethodID from the cache or update the
// cache with the new jmethodID. This function should never do anything
// that causes the caller to go to a safepoint or we can deadlock with
// the VMThread or have cache consistency issues.
//
jmethodID InstanceKlass::get_jmethod_id_fetch_or_update(
            size_t idnum, jmethodID new_id,
            jmethodID* new_jmeths, jmethodID* to_dealloc_id_p,
            jmethodID** to_dealloc_jmeths_p) {
  assert(new_id != NULL, "sanity check");
  assert(to_dealloc_id_p != NULL, "sanity check");
  assert(to_dealloc_jmeths_p != NULL, "sanity check");
  // caller must hold the creation lock, or be the only thread, or be at a safepoint
  assert(Threads::number_of_threads() == 0 ||
         SafepointSynchronize::is_at_safepoint() ||
         JmethodIdCreation_lock->owned_by_self(), "sanity check");

  // reacquire the cache - we are locked, single threaded or at a safepoint
  jmethodID* jmeths = methods_jmethod_ids_acquire();
  jmethodID id = NULL;
  size_t length = 0;

  if (jmeths == NULL ||                         // no cache yet
      (length = (size_t)jmeths[0]) <= idnum) {  // cache is too short
    if (jmeths != NULL) {
      // copy any existing entries from the old cache
      for (size_t index = 0; index < length; index++) {
        new_jmeths[index+1] = jmeths[index+1];
      }
      *to_dealloc_jmeths_p = jmeths;  // save old cache for later delete
    }
    // publish the (possibly grown) cache with release semantics
    release_set_methods_jmethod_ids(jmeths = new_jmeths);
  } else {
    // fetch jmethodID (if any) from the existing cache
    id = jmeths[idnum+1];
    *to_dealloc_jmeths_p = new_jmeths;  // save new cache for later delete
  }
  if (id == NULL) {
    // No matching jmethodID in the existing cache or we have a new
    // cache or we just grew the cache. This cache write is done here
    // by the first thread to win the foot race because a jmethodID
    // needs to be unique once it is generally available.
    id = new_id;

    // The jmethodID cache can be read while unlocked so we have to
    // make sure the new jmethodID is complete before installing it
    // in the cache.
    OrderAccess::release_store(&jmeths[idnum+1], id);
  } else {
    *to_dealloc_id_p = new_id;  // save new id for later delete
  }
  return id;
}


// Common code to get the jmethodID cache length and the jmethodID
// value at index idnum if there is one.
//
void InstanceKlass::get_jmethod_id_length_value(jmethodID* cache,
       size_t idnum, size_t *length_p, jmethodID* id_p) {
  assert(cache != NULL, "sanity check");
  assert(length_p != NULL, "sanity check");
  assert(id_p != NULL, "sanity check");

  // cache size is stored in element[0], other elements offset by one
  *length_p = (size_t)cache[0];
  if (*length_p <= idnum) {  // cache is too short
    *id_p = NULL;
  } else {
    *id_p = cache[idnum+1];  // fetch jmethodID (if any)
  }
}


// Lookup a jmethodID, NULL if not found.  Do no blocking, no allocations, no handles
jmethodID InstanceKlass::jmethod_id_or_null(Method* method) {
  size_t idnum = (size_t)method->method_idnum();
  jmethodID* jmeths = methods_jmethod_ids_acquire();
  size_t length;  // length assigned as debugging crumb
  jmethodID id = NULL;
  if (jmeths != NULL &&                         // If there is a cache
      (length = (size_t)jmeths[0]) > idnum) {   // and if it is long enough,
    id = jmeths[idnum+1];                       // Look up the id (may be NULL)
  }
  return id;
}

// Wraps the raw _dep_context field in a DependencyContext accessor object.
inline DependencyContext InstanceKlass::dependencies() {
  DependencyContext dep_context(&_dep_context);
  return dep_context;
}

int InstanceKlass::mark_dependent_nmethods(KlassDepChange& changes) {
  return dependencies().mark_dependent_nmethods(changes);
}

void InstanceKlass::add_dependent_nmethod(nmethod* nm) {
  dependencies().add_dependent_nmethod(nm);
}

void InstanceKlass::remove_dependent_nmethod(nmethod* nm, bool delete_immediately) {
  dependencies().remove_dependent_nmethod(nm, delete_immediately);
}
#ifndef PRODUCT
void InstanceKlass::print_dependent_nmethods(bool verbose) {
  dependencies().print_dependent_nmethods(verbose);
}

bool InstanceKlass::is_dependent_nmethod(nmethod* nm) {
  return dependencies().is_dependent_nmethod(nm);
}
#endif //PRODUCT

void InstanceKlass::clean_weak_instanceklass_links() {
  clean_implementors_list();
  clean_method_data();

  // Since GC iterates InstanceKlasses sequentially, it is safe to remove stale entries here.
  DependencyContext dep_context(&_dep_context);
  dep_context.expunge_stale_entries();
}

// For interfaces: drop the recorded implementor if its class loader is dead.
void InstanceKlass::clean_implementors_list() {
  assert(is_loader_alive(), "this klass should be live");
  if (is_interface()) {
    if (ClassUnloading) {
      Klass* impl = implementor();
      if (impl != NULL) {
        if (!impl->is_loader_alive()) {
          // remove this guy
          Klass** klass = adr_implementor();
          assert(klass != NULL, "null klass");
          if (klass != NULL) {
            *klass = NULL;
          }
        }
      }
    }
  }
}

void InstanceKlass::clean_method_data() {
  for (int m = 0; m < methods()->length(); m++) {
    MethodData* mdo = methods()->at(m)->method_data();
    if (mdo != NULL) {
      mdo->clean_method_data(/*always_clean*/false);
    }
  }
}

// Returns false (with trace logging) if the super class or any local
// interface has not passed its fingerprint check.
bool InstanceKlass::supers_have_passed_fingerprint_checks() {
  if (java_super() != NULL && !java_super()->has_passed_fingerprint_check()) {
    ResourceMark rm;
    log_trace(class, fingerprint)("%s : super %s not fingerprinted", external_name(), java_super()->external_name());
    return false;
  }

  Array<InstanceKlass*>* local_interfaces = this->local_interfaces();
  if (local_interfaces != NULL) {
    int length = local_interfaces->length();
    for (int i = 0; i < length; i++) {
      InstanceKlass* intf = local_interfaces->at(i);
      if (!intf->has_passed_fingerprint_check()) {
        ResourceMark rm;
        log_trace(class, fingerprint)("%s : interface %s not fingerprinted", external_name(), intf->external_name());
        return false;
      }
    }
  }

  return true;
}

bool InstanceKlass::should_store_fingerprint(bool is_anonymous) {
#if INCLUDE_AOT
  // We store the fingerprint into the InstanceKlass only in the following 2 cases:
  if (CalculateClassFingerprint) {
    // (1) We are running AOT to generate a shared library.
    return true;
  }
  if (DumpSharedSpaces) {
    // (2) We are running -Xshare:dump to create a shared archive
    return true;
  }
  if (UseAOT && is_anonymous) {
    // (3) We are using AOT code from a shared library and see an anonymous class
    return true;
  }
#endif

  // In all other cases we might set the _misc_has_passed_fingerprint_check bit,
  // but do not store the 64-bit fingerprint to save space.
  return false;
}

bool InstanceKlass::has_stored_fingerprint() const {
#if INCLUDE_AOT
  return should_store_fingerprint() || is_shared();
#else
  return false;
#endif
}

uint64_t InstanceKlass::get_stored_fingerprint() const {
  address adr = adr_fingerprint();
  if (adr != NULL) {
    return (uint64_t)Bytes::get_native_u8(adr); // adr may not be 64-bit aligned
  }
  return 0;
}

void InstanceKlass::store_fingerprint(uint64_t fingerprint) {
  address adr = adr_fingerprint();
  if (adr != NULL) {
    Bytes::put_native_u8(adr, (u8)fingerprint); // adr may not be 64-bit aligned

    ResourceMark rm;
    log_trace(class, fingerprint)("stored as " PTR64_FORMAT " for class %s", fingerprint, external_name());
  }
}

// Visits all metaspace pointers embedded in this klass (for CDS archiving).
void InstanceKlass::metaspace_pointers_do(MetaspaceClosure* it) {
  Klass::metaspace_pointers_do(it);

  if (log_is_enabled(Trace, cds)) {
    ResourceMark rm;
    log_trace(cds)("Iter(InstanceKlass): %p (%s)", this, external_name());
  }

  it->push(&_annotations);
  it->push((Klass**)&_array_klasses);
  it->push(&_constants);
  it->push(&_inner_classes);
  it->push(&_array_name);
#if INCLUDE_JVMTI
  it->push(&_previous_versions);
#endif
  it->push(&_methods);
  it->push(&_default_methods);
  it->push(&_local_interfaces);
  it->push(&_transitive_interfaces);
  it->push(&_method_ordering);
  it->push(&_default_vtable_indices);
  it->push(&_fields);

  if (itable_length() > 0) {
    // walk every interface entry and method entry embedded in the itable
    itableOffsetEntry* ioe = (itableOffsetEntry*)start_of_itable();
    int method_table_offset_in_words = ioe->offset()/wordSize;
    int nof_interfaces = (method_table_offset_in_words - itable_offset_in_words())
                         / itableOffsetEntry::size();

    for (int i = 0; i < nof_interfaces; i ++, ioe ++) {
      if (ioe->interface_klass() != NULL) {
        it->push(ioe->interface_klass_addr());
        itableMethodEntry* ime = ioe->first_method_entry(this);
        int n = klassItable::method_count_for_interface(ioe->interface_klass());
        for (int index = 0; index < n; index ++) {
          it->push(ime[index].method_addr());
        }
      }
    }
  }

  it->push(&_nest_members);
}

// Strip runtime-only state before this klass is written into the CDS archive.
void InstanceKlass::remove_unshareable_info() {
  Klass::remove_unshareable_info();

  if (is_in_error_state()) {
    // Classes are attempted to link during dumping and may fail,
    // but these classes are still in the dictionary and class list in CLD.
    // Check in_error state first because in_error is > linked state, so
    // is_linked() is true.
    // If there's a linking error, there is nothing else to remove.
    return;
  }

  // Unlink the class
  if (is_linked()) {
    unlink_class();
  }
  {
    MutexLocker ml(Compile_lock);
    init_implementor();
  }

  constants()->remove_unshareable_info();

  for (int i = 0; i < methods()->length(); i++) {
    Method* m = methods()->at(i);
    m->remove_unshareable_info();
  }

  // do array classes also.
  if (array_klasses() != NULL) {
    array_klasses()->remove_unshareable_info();
  }

  // These are not allocated from metaspace, but they should should all be empty
  // during dump time, so we don't need to worry about them in InstanceKlass::iterate().
  guarantee(_source_debug_extension == NULL, "must be");
  guarantee(_dep_context == DependencyContext::EMPTY, "must be");
  guarantee(_osr_nmethods_head == NULL, "must be");

#if INCLUDE_JVMTI
  guarantee(_breakpoints == NULL, "must be");
  guarantee(_previous_versions == NULL, "must be");
#endif

  _init_thread = NULL;
  _methods_jmethod_ids = NULL;
  _jni_ids = NULL;
  _oop_map_cache = NULL;
  // clear _nest_host to ensure re-load at runtime
  _nest_host = NULL;
}

void InstanceKlass::remove_java_mirror() {
  Klass::remove_java_mirror();

  // do array classes also.
  if (array_klasses() != NULL) {
    array_klasses()->remove_java_mirror();
  }
}

// Re-establish runtime state for this klass after it is loaded from the CDS archive.
void InstanceKlass::restore_unshareable_info(ClassLoaderData* loader_data, Handle protection_domain, TRAPS) {
  set_package(loader_data, CHECK);
  Klass::restore_unshareable_info(loader_data, protection_domain, CHECK);

  Array<Method*>* methods = this->methods();
  int num_methods = methods->length();
  for (int index2 = 0; index2 < num_methods; ++index2) {
    methodHandle m(THREAD, methods->at(index2));
    m->restore_unshareable_info(CHECK);
  }
  if (JvmtiExport::has_redefined_a_class()) {
    // Reinitialize vtable because RedefineClasses may have changed some
    // entries in this vtable for super classes so the CDS vtable might
    // point to old or obsolete entries. RedefineClasses doesn't fix up
    // vtables in the shared system dictionary, only the main one.
    // It also redefines the itable too so fix that too.
    vtable().initialize_vtable(false, CHECK);
    itable().initialize_itable(false, CHECK);
  }

  // restore constant pool resolved references
  constants()->restore_unshareable_info(CHECK);

  if (array_klasses() != NULL) {
    // Array classes have null protection domain.
    // --> see ArrayKlass::complete_create_array_klass()
    array_klasses()->restore_unshareable_info(ClassLoaderData::the_null_class_loader_data(), Handle(), CHECK);
  }
}

// returns true IFF is_in_error_state() has been changed as a result of this call.
2501 bool InstanceKlass::check_sharing_error_state() { 2502 assert(DumpSharedSpaces, "should only be called during dumping"); 2503 bool old_state = is_in_error_state(); 2504 2505 if (!is_in_error_state()) { 2506 bool bad = false; 2507 for (InstanceKlass* sup = java_super(); sup; sup = sup->java_super()) { 2508 if (sup->is_in_error_state()) { 2509 bad = true; 2510 break; 2511 } 2512 } 2513 if (!bad) { 2514 Array<InstanceKlass*>* interfaces = transitive_interfaces(); 2515 for (int i = 0; i < interfaces->length(); i++) { 2516 InstanceKlass* iface = interfaces->at(i); 2517 if (iface->is_in_error_state()) { 2518 bad = true; 2519 break; 2520 } 2521 } 2522 } 2523 2524 if (bad) { 2525 set_in_error_state(); 2526 } 2527 } 2528 2529 return (old_state != is_in_error_state()); 2530 } 2531 2532 #if INCLUDE_JVMTI 2533 static void clear_all_breakpoints(Method* m) { 2534 m->clear_all_breakpoints(); 2535 } 2536 #endif 2537 2538 void InstanceKlass::notify_unload_class(InstanceKlass* ik) { 2539 // notify the debugger 2540 if (JvmtiExport::should_post_class_unload()) { 2541 JvmtiExport::post_class_unload(ik); 2542 } 2543 2544 // notify ClassLoadingService of class unload 2545 ClassLoadingService::notify_class_unloaded(ik); 2546 } 2547 2548 void InstanceKlass::release_C_heap_structures(InstanceKlass* ik) { 2549 // Clean up C heap 2550 ik->release_C_heap_structures(); 2551 ik->constants()->release_C_heap_structures(); 2552 } 2553 2554 void InstanceKlass::release_C_heap_structures() { 2555 // Can't release the constant pool here because the constant pool can be 2556 // deallocated separately from the InstanceKlass for default methods and 2557 // redefine classes. 
2558 2559 // Deallocate oop map cache 2560 if (_oop_map_cache != NULL) { 2561 delete _oop_map_cache; 2562 _oop_map_cache = NULL; 2563 } 2564 2565 // Deallocate JNI identifiers for jfieldIDs 2566 JNIid::deallocate(jni_ids()); 2567 set_jni_ids(NULL); 2568 2569 jmethodID* jmeths = methods_jmethod_ids_acquire(); 2570 if (jmeths != (jmethodID*)NULL) { 2571 release_set_methods_jmethod_ids(NULL); 2572 FreeHeap(jmeths); 2573 } 2574 2575 // Release dependencies. 2576 // It is desirable to use DC::remove_all_dependents() here, but, unfortunately, 2577 // it is not safe (see JDK-8143408). The problem is that the klass dependency 2578 // context can contain live dependencies, since there's a race between nmethod & 2579 // klass unloading. If the klass is dead when nmethod unloading happens, relevant 2580 // dependencies aren't removed from the context associated with the class (see 2581 // nmethod::flush_dependencies). It ends up during klass unloading as seemingly 2582 // live dependencies pointing to unloaded nmethods and causes a crash in 2583 // DC::remove_all_dependents() when it touches unloaded nmethod. 2584 dependencies().wipe(); 2585 2586 #if INCLUDE_JVMTI 2587 // Deallocate breakpoint records 2588 if (breakpoints() != 0x0) { 2589 methods_do(clear_all_breakpoints); 2590 assert(breakpoints() == 0x0, "should have cleared breakpoints"); 2591 } 2592 2593 // deallocate the cached class file 2594 if (_cached_class_file != NULL && !MetaspaceShared::is_in_shared_metaspace(_cached_class_file)) { 2595 os::free(_cached_class_file); 2596 _cached_class_file = NULL; 2597 } 2598 #endif 2599 2600 // Decrement symbol reference counts associated with the unloaded class. 2601 if (_name != NULL) _name->decrement_refcount(); 2602 // unreference array name derived from this class name (arrays of an unloaded 2603 // class can't be referenced anymore). 
  // Tail of the C-heap cleanup for an unloaded class: drop refcounts of
  // Symbols this klass kept alive and free the C-heap copy of the
  // SourceDebugExtension attribute.
  if (_array_name != NULL) _array_name->decrement_refcount();
  if (_value_types != NULL) {
    // Each ValueTypes attribute entry may hold a Symbol for a class name.
    for (int i = 0; i < _value_types->length(); i++) {
      Symbol* s = _value_types->at(i)._class_name;
      if (s != NULL) {
        s->decrement_refcount();
      }
    }
  }
  if (_source_debug_extension != NULL) FREE_C_HEAP_ARRAY(char, _source_debug_extension);
}

// Stores a NUL-terminated C-heap copy of the SourceDebugExtension attribute
// bytes, or clears the field when 'array' is NULL.  The previous value is not
// freed here; it is released when the class's C-heap structures are freed.
void InstanceKlass::set_source_debug_extension(const char* array, int length) {
  if (array == NULL) {
    _source_debug_extension = NULL;
  } else {
    // Adding one to the attribute length in order to store a null terminator
    // character could cause an overflow because the attribute length is
    // already coded with an u4 in the classfile, but in practice, it's
    // unlikely to happen.
    assert((length+1) > length, "Overflow checking");
    char* sde = NEW_C_HEAP_ARRAY(char, (length + 1), mtClass);
    for (int i = 0; i < length; i++) {
      sde[i] = array[i];
    }
    sde[length] = '\0';
    _source_debug_extension = sde;
  }
}

// Returns this class's name in JVM signature form ("Lfoo/Bar;") as a
// resource-area string.  Anonymous classes get a "/<hash>" suffix derived from
// the mirror's identity hash so the name is unique.
const char* InstanceKlass::signature_name() const {
  int hash_len = 0;
  char hash_buf[40];

  // If this is an anonymous class, append a hash to make the name unique
  if (is_anonymous()) {
    intptr_t hash = (java_mirror() != NULL) ? java_mirror()->identity_hash() : 0;
    jio_snprintf(hash_buf, sizeof(hash_buf), "/" UINTX_FORMAT, (uintx)hash);
    hash_len = (int)strlen(hash_buf);
  }

  // Get the internal name as a c string
  const char* src = (const char*) (name()->as_C_string());
  const int src_length = (int)strlen(src);

  // +3: leading 'L', trailing ';' and the NUL terminator.
  char* dest = NEW_RESOURCE_ARRAY(char, src_length + hash_len + 3);

  // Add L as type indicator
  int dest_index = 0;
  dest[dest_index++] = 'L';

  // Add the actual class name
  for (int src_index = 0; src_index < src_length; ) {
    dest[dest_index++] = src[src_index++];
  }

  // If we have a hash, append it
  for (int hash_index = 0; hash_index < hash_len; ) {
    dest[dest_index++] = hash_buf[hash_index++];
  }

  // Add the semicolon and the NULL
  dest[dest_index++] = ';';
  dest[dest_index] = '\0';
  return dest;
}

// Used to obtain the package name from a fully qualified class name.
// Returns NULL for a NULL/empty name or a class with no package; otherwise
// returns a newly created (refcounted) Symbol for the package name.
Symbol* InstanceKlass::package_from_name(const Symbol* name, TRAPS) {
  if (name == NULL) {
    return NULL;
  } else {
    if (name->utf8_length() <= 0) {
      return NULL;
    }
    ResourceMark rm;
    const char* package_name = ClassLoader::package_from_name((const char*) name->as_C_string());
    if (package_name == NULL) {
      return NULL;
    }
    Symbol* pkg_name = SymbolTable::new_symbol(package_name, THREAD);
    return pkg_name;
  }
}

// Returns the module this class belongs to: the module of its package when it
// has one; otherwise the unnamed module of its class loader (or, when a host
// class exists, the unnamed module of the host class's loader).
ModuleEntry* InstanceKlass::module() const {
  if (!in_unnamed_package()) {
    return _package_entry->module();
  }
  const Klass* host = host_klass();
  if (host == NULL) {
    return class_loader_data()->unnamed_module();
  }
  return host->class_loader_data()->unnamed_module();
}

// Computes and caches this class's PackageEntry from its name and loader,
// creating the package in the appropriate module if needed.
void InstanceKlass::set_package(ClassLoaderData* loader_data, TRAPS) {

  // ensure java/ packages only loaded by boot or platform builtin loaders
  check_prohibited_package(name(), loader_data, CHECK);

  TempNewSymbol pkg_name = package_from_name(name(), CHECK);

  if (pkg_name != NULL && loader_data != NULL) {

    // Find in class loader's package entry table.
    _package_entry = loader_data->packages()->lookup_only(pkg_name);

    // If the package name is not found in the loader's package
    // entry table, it is an indication that the package has not
    // been defined. Consider it defined within the unnamed module.
    if (_package_entry == NULL) {
      ResourceMark rm;

      if (!ModuleEntryTable::javabase_defined()) {
        // Before java.base is defined during bootstrapping, define all packages in
        // the java.base module. If a non-java.base package is erroneously placed
        // in the java.base module it will be caught later when java.base
        // is defined by ModuleEntryTable::verify_javabase_packages check.
        assert(ModuleEntryTable::javabase_moduleEntry() != NULL, JAVA_BASE_NAME " module is NULL");
        _package_entry = loader_data->packages()->lookup(pkg_name, ModuleEntryTable::javabase_moduleEntry());
      } else {
        assert(loader_data->unnamed_module() != NULL, "unnamed module is NULL");
        _package_entry = loader_data->packages()->lookup(pkg_name,
                                                         loader_data->unnamed_module());
      }

      // A package should have been successfully created
      assert(_package_entry != NULL, "Package entry for class %s not found, loader %s",
             name()->as_C_string(), loader_data->loader_name_and_id());
    }

    // NOTE(review): the guard tests the Debug level but the statement below
    // logs at Trace level, so when only Debug is enabled the ResourceMark and
    // module lookup run without producing output -- confirm this is intended.
    if (log_is_enabled(Debug, module)) {
      ResourceMark rm;
      ModuleEntry* m = _package_entry->module();
      log_trace(module)("Setting package: class: %s, package: %s, loader: %s, module: %s",
                        external_name(),
                        pkg_name->as_C_string(),
                        loader_data->loader_name_and_id(),
                        (m->is_named() ? m->name()->as_C_string() : UNNAMED_MODULE));
    }
  } else {
    ResourceMark rm;
    log_trace(module)("Setting package: class: %s, package: unnamed, loader: %s, module: %s",
                      external_name(),
                      (loader_data != NULL) ? loader_data->loader_name_and_id() : "NULL",
                      UNNAMED_MODULE);
  }
}


// different versions of is_same_class_package

// Two classes are in the same package iff they have the same class loader and
// the same PackageEntry.  Object arrays are judged by their bottom element
// class; type arrays have neither loader nor package (both NULL).
bool InstanceKlass::is_same_class_package(const Klass* class2) const {
  oop classloader1 = this->class_loader();
  PackageEntry* classpkg1 = this->package();
  if (class2->is_objArray_klass()) {
    class2 = ObjArrayKlass::cast(class2)->bottom_klass();
  }

  oop classloader2;
  PackageEntry* classpkg2;
  if (class2->is_instance_klass()) {
    classloader2 = class2->class_loader();
    classpkg2 = class2->package();
  } else {
    assert(class2->is_typeArray_klass(), "should be type array");
    classloader2 = NULL;
    classpkg2 = NULL;
  }

  // Same package is determined by comparing class loader
  // and package entries. Both must be the same. This rule
  // applies even to classes that are defined in the unnamed
  // package, they still must have the same class loader.
  if (oopDesc::equals(classloader1, classloader2) && (classpkg1 == classpkg2)) {
    return true;
  }

  return false;
}

// return true if this class and other_class are in the same package.
// Classloader and classname information is enough to determine a class's
// package, so the other class need not be loaded.
bool InstanceKlass::is_same_class_package(oop other_class_loader,
                                          const Symbol* other_class_name) const {
  // Different loaders can never share a package.
  if (!oopDesc::equals(class_loader(), other_class_loader)) {
    return false;
  }
  // Same loader and identical name: trivially the same package.
  if (name()->fast_compare(other_class_name) == 0) {
    return true;
  }

  {
    ResourceMark rm;

    bool bad_class_name = false;
    const char* other_pkg =
      ClassLoader::package_from_name((const char*) other_class_name->as_C_string(), &bad_class_name);
    if (bad_class_name) {
      return false;
    }
    // Check that package_from_name() returns NULL, not "", if there is no package.
    assert(other_pkg == NULL || strlen(other_pkg) > 0, "package name is empty string");

    const Symbol* const this_package_name =
      this->package() != NULL ? this->package()->name() : NULL;

    if (this_package_name == NULL || other_pkg == NULL) {
      // One of the two doesn't have a package.  Only return true if the other
      // one also doesn't have a package.  (The cast makes the pointer
      // comparison well-typed; both sides must be NULL for this to be true.)
      return (const char*)this_package_name == other_pkg;
    }

    // Check if package is identical
    return this_package_name->equals(other_pkg);
  }
}

// Returns true iff super_method can be overridden by a method in targetclassname
// See JLS 3rd edition 8.4.6.1
// Assumes name-signature match
// "this" is InstanceKlass of super_method which must exist
// note that the InstanceKlass of the method in the targetclassname has not always been created yet
bool InstanceKlass::is_override(const methodHandle& super_method, Handle targetclassloader, Symbol* targetclassname, TRAPS) {
  // Private methods can not be overridden
  if (super_method->is_private()) {
    return false;
  }
  // If super method is accessible, then override
  if ((super_method->is_protected()) ||
      (super_method->is_public())) {
    return true;
  }
  // Package-private methods are not inherited outside of package
  assert(super_method->is_package_private(), "must be package private");
  return(is_same_class_package(targetclassloader(), targetclassname));
}

// Only boot and platform class loaders can define classes in "java/" packages.
// Throws SecurityException when a loader other than the boot or platform
// loader attempts to define a class in a "java/" package.
void InstanceKlass::check_prohibited_package(Symbol* class_name,
                                             ClassLoaderData* loader_data,
                                             TRAPS) {
  if (!loader_data->is_boot_class_loader_data() &&
      !loader_data->is_platform_class_loader_data() &&
      class_name != NULL) {
    ResourceMark rm(THREAD);
    char* name = class_name->as_C_string();
    // Match "java" strictly as a package prefix ("java/..."), not as a name prefix.
    if (strncmp(name, JAVAPKG, JAVAPKG_LEN) == 0 && name[JAVAPKG_LEN] == '/') {
      TempNewSymbol pkg_name = InstanceKlass::package_from_name(class_name, CHECK);
      assert(pkg_name != NULL, "Error in parsing package name starting with 'java/'");
      name = pkg_name->as_C_string();
      const char* class_loader_name = loader_data->loader_name_and_id();
      // Report the dotted form of the package name in the exception message.
      StringUtils::replace_no_expand(name, "/", ".");
      const char* msg_text1 = "Class loader (instance of): ";
      const char* msg_text2 = " tried to load prohibited package name: ";
      size_t len = strlen(msg_text1) + strlen(class_loader_name) + strlen(msg_text2) + strlen(name) + 1;
      char* message = NEW_RESOURCE_ARRAY_IN_THREAD(THREAD, char, len);
      jio_snprintf(message, len, "%s%s%s%s", msg_text1, class_loader_name, msg_text2, name);
      THROW_MSG(vmSymbols::java_lang_SecurityException(), message);
    }
  }
  return;
}

// tell if two classes have the same enclosing class (at package level)
bool InstanceKlass::is_same_package_member(const Klass* class2, TRAPS) const {
  if (class2 == this) return true;
  if (!class2->is_instance_klass()) return false;

  // must be in same package before we try anything else
  if (!is_same_class_package(class2))
    return false;

  // As long as there is an outer_this.getEnclosingClass,
  // shift the search outward.
  const InstanceKlass* outer_this = this;
  for (;;) {
    // As we walk along, look for equalities between outer_this and class2.
    // Eventually, the walks will terminate as outer_this stops
    // at the top-level class around the original class.
    bool ignore_inner_is_member;
    const Klass* next = outer_this->compute_enclosing_class(&ignore_inner_is_member,
                                                            CHECK_false);
    if (next == NULL) break;
    if (next == class2) return true;
    outer_this = InstanceKlass::cast(next);
  }

  // Now do the same for class2.
  const InstanceKlass* outer2 = InstanceKlass::cast(class2);
  for (;;) {
    bool ignore_inner_is_member;
    Klass* next = outer2->compute_enclosing_class(&ignore_inner_is_member,
                                                  CHECK_false);
    if (next == NULL) break;
    // Might as well check the new outer against all available values.
    if (next == this) return true;
    if (next == outer_this) return true;
    outer2 = InstanceKlass::cast(next);
  }

  // If by this point we have not found an equality between the
  // two classes, we know they are in separate package members.
  return false;
}

// Scans this class's InnerClasses attribute for an entry that describes this
// class itself; on a match stores the outer-class-info and inner-name constant
// pool indices into *ooff and *noff and returns true.
bool InstanceKlass::find_inner_classes_attr(int* ooff, int* noff, TRAPS) const {
  constantPoolHandle i_cp(THREAD, constants());
  for (InnerClassesIterator iter(this); !iter.done(); iter.next()) {
    int ioff = iter.inner_class_info_index();
    if (ioff != 0) {
      // Check to see if the name matches the class we're looking for
      // before attempting to find the class.
      if (i_cp->klass_name_at_matches(this, ioff)) {
        Klass* inner_klass = i_cp->klass_at(ioff, CHECK_false);
        if (this == inner_klass) {
          *ooff = iter.outer_class_info_index();
          *noff = iter.inner_name_index();
          return true;
        }
      }
    }
  }
  return false;
}

// Returns the enclosing class recorded in the class file (via the
// InnerClasses attribute, or the EnclosingMethod class for local/anonymous
// classes), or NULL if there is none.  *inner_is_member is set true only when
// this class is a declared member of the returned outer class.
InstanceKlass* InstanceKlass::compute_enclosing_class(bool* inner_is_member, TRAPS) const {
  InstanceKlass* outer_klass = NULL;
  *inner_is_member = false;
  int ooff = 0, noff = 0;
  bool has_inner_classes_attr = find_inner_classes_attr(&ooff, &noff, THREAD);
  if (has_inner_classes_attr) {
    constantPoolHandle i_cp(THREAD, constants());
    if (ooff != 0) {
      Klass* ok = i_cp->klass_at(ooff, CHECK_NULL);
      outer_klass = InstanceKlass::cast(ok);
      *inner_is_member = true;
    }
    if (NULL == outer_klass) {
      // It may be anonymous; try for that.
      int encl_method_class_idx = enclosing_method_class_index();
      if (encl_method_class_idx != 0) {
        Klass* ok = i_cp->klass_at(encl_method_class_idx, CHECK_NULL);
        outer_klass = InstanceKlass::cast(ok);
        *inner_is_member = false;
      }
    }
  }

  // If no inner class attribute found for this class.
  if (NULL == outer_klass) return NULL;

  // Throws an exception if outer klass has not declared k as an inner klass
  // We need evidence that each klass knows about the other, or else
  // the system could allow a spoof of an inner class to gain access rights.
  Reflection::check_for_inner_class(outer_klass, this, *inner_is_member, CHECK_NULL);
  return outer_klass;
}

// Computes the modifier flags for this class: its own access flags, unless
// the InnerClasses attribute records this class as a member, in which case
// the inner-class access flags are reported instead.  ACC_SUPER is stripped.
jint InstanceKlass::compute_modifier_flags(TRAPS) const {
  jint access = access_flags().as_int();

  // But check if it happens to be member class.
  InnerClassesIterator iter(this);
  for (; !iter.done(); iter.next()) {
    int ioff = iter.inner_class_info_index();
    // Inner class attribute can be zero, skip it.
    // Strange but true: JVM spec. allows null inner class refs.
    if (ioff == 0) continue;

    // only look at classes that are already loaded
    // since we are looking for the flags for our self.
    Symbol* inner_name = constants()->klass_name_at(ioff);
    if (name() == inner_name) {
      // This is really a member class.
      access = iter.inner_access_flags();
      break;
    }
  }
  // Remember to strip ACC_SUPER bit
  return (access & (~JVM_ACC_SUPER)) & JVM_ACC_WRITTEN_FLAGS;
}

// Maps this class's initialization state onto the JVMTI class status bits.
jint InstanceKlass::jvmti_class_status() const {
  jint result = 0;

  if (is_linked()) {
    result |= JVMTI_CLASS_STATUS_VERIFIED | JVMTI_CLASS_STATUS_PREPARED;
  }

  if (is_initialized()) {
    assert(is_linked(), "Class status is not consistent");
    result |= JVMTI_CLASS_STATUS_INITIALIZED;
  }
  if (is_in_error_state()) {
    result |= JVMTI_CLASS_STATUS_ERROR;
  }
  return result;
}

// Resolves the itable slot for (holder, index) on this receiver class.
// Throws IncompatibleClassChangeError when the receiver does not implement
// holder, and AbstractMethodError when the resolved slot is empty.
Method* InstanceKlass::method_at_itable(Klass* holder, int index, TRAPS) {
  itableOffsetEntry* ioe = (itableOffsetEntry*)start_of_itable();
  int method_table_offset_in_words = ioe->offset()/wordSize;
  // The offset table ends where the first method table begins.
  int nof_interfaces = (method_table_offset_in_words - itable_offset_in_words())
                       / itableOffsetEntry::size();

  for (int cnt = 0 ; ; cnt ++, ioe ++) {
    // If the interface isn't implemented by the receiver class,
    // the VM should throw IncompatibleClassChangeError.
    if (cnt >= nof_interfaces) {
      ResourceMark rm(THREAD);
      stringStream ss;
      bool same_module = (module() == holder->module());
      ss.print("Receiver class %s does not implement "
               "the interface %s defining the method to be called "
               "(%s%s%s)",
               external_name(), holder->external_name(),
               (same_module) ? joint_in_module_of_loader(holder) : class_in_module_of_loader(),
               (same_module) ? "" : "; ",
               (same_module) ? "" : holder->class_in_module_of_loader());
      THROW_MSG_NULL(vmSymbols::java_lang_IncompatibleClassChangeError(), ss.as_string());
    }

    Klass* ik = ioe->interface_klass();
    if (ik == holder) break;
  }

  itableMethodEntry* ime = ioe->first_method_entry(this);
  Method* m = ime[index].method();
  if (m == NULL) {
    THROW_NULL(vmSymbols::java_lang_AbstractMethodError());
  }
  return m;
}


#if INCLUDE_JVMTI
// update default_methods for redefineclasses for methods that are
// not yet in the vtable due to concurrent subclass define and superinterface
// redefinition
// Note: those in the vtable, should have been updated via adjust_method_entries
void InstanceKlass::adjust_default_methods(InstanceKlass* holder, bool* trace_name_printed) {
  // search the default_methods for uses of either obsolete or EMCP methods
  if (default_methods() != NULL) {
    for (int index = 0; index < default_methods()->length(); index ++) {
      Method* old_method = default_methods()->at(index);
      if (old_method == NULL || old_method->method_holder() != holder || !old_method->is_old()) {
        continue; // skip uninteresting entries
      }
      assert(!old_method->is_deleted(), "default methods may not be deleted");

      // Replace the obsolete entry with the new method of the same idnum.
      Method* new_method = holder->method_with_idnum(old_method->orig_method_idnum());

      assert(new_method != NULL, "method_with_idnum() should not be NULL");
      assert(old_method != new_method, "sanity check");

      default_methods()->at_put(index, new_method);
      if (log_is_enabled(Info, redefine, class, update)) {
        ResourceMark rm;
        if (!(*trace_name_printed)) {
          // Print the class name header only once per adjustment pass.
          log_info(redefine, class, update)
            ("adjust: klassname=%s default methods from name=%s",
             external_name(), old_method->method_holder()->external_name());
          *trace_name_printed = true;
        }
        log_debug(redefine, class, update, vtables)
          ("default method update: %s(%s) ",
           new_method->name()->as_C_string(), new_method->signature()->as_C_string());
      }
    }
  }
}
#endif // INCLUDE_JVMTI

// On-stack replacement stuff
// Links a freshly compiled OSR nmethod at the head of this class's OSR list
// and, under tiered compilation, invalidates lower-level OSR methods for the
// same bci.
void InstanceKlass::add_osr_nmethod(nmethod* n) {
  // only one compilation can be active
  {
    // This is a short non-blocking critical region, so the no safepoint check is ok.
    MutexLockerEx ml(OsrList_lock, Mutex::_no_safepoint_check_flag);
    assert(n->is_osr_method(), "wrong kind of nmethod");
    n->set_osr_link(osr_nmethods_head());
    set_osr_nmethods_head(n);
    // Raise the highest osr level if necessary
    if (TieredCompilation) {
      Method* m = n->method();
      m->set_highest_osr_comp_level(MAX2(m->highest_osr_comp_level(), n->comp_level()));
    }
  }

  // Get rid of the osr methods for the same bci that have lower levels.
  if (TieredCompilation) {
    for (int l = CompLevel_limited_profile; l < n->comp_level(); l++) {
      nmethod *inv = lookup_osr_nmethod(n->method(), n->osr_entry_bci(), l, true);
      if (inv != NULL && inv->is_in_use()) {
        inv->make_not_entrant();
      }
    }
  }
}

// Remove osr nmethod from the list. Return true if found and removed.
bool InstanceKlass::remove_osr_nmethod(nmethod* n) {
  // This is a short non-blocking critical region, so the no safepoint check is ok.
  MutexLockerEx ml(OsrList_lock, Mutex::_no_safepoint_check_flag);
  assert(n->is_osr_method(), "wrong kind of nmethod");
  nmethod* last = NULL;
  nmethod* cur = osr_nmethods_head();
  int max_level = CompLevel_none; // Find the max comp level excluding n
  Method* m = n->method();
  // Search for match
  bool found = false;
  while(cur != NULL && cur != n) {
    if (TieredCompilation && m == cur->method()) {
      // Find max level before n
      max_level = MAX2(max_level, cur->comp_level());
    }
    last = cur;
    cur = cur->osr_link();
  }
  nmethod* next = NULL;
  if (cur == n) {
    found = true;
    next = cur->osr_link();
    if (last == NULL) {
      // Remove first element
      set_osr_nmethods_head(next);
    } else {
      last->set_osr_link(next);
    }
  }
  n->set_osr_link(NULL);
  if (TieredCompilation) {
    cur = next;
    while (cur != NULL) {
      // Find max level after n
      if (m == cur->method()) {
        max_level = MAX2(max_level, cur->comp_level());
      }
      cur = cur->osr_link();
    }
    // Recompute the method's highest OSR level now that n is unlinked.
    m->set_highest_osr_comp_level(max_level);
  }
  return found;
}

// Marks every OSR nmethod compiled for m for deoptimization; returns the
// number of nmethods marked.
int InstanceKlass::mark_osr_nmethods(const Method* m) {
  // This is a short non-blocking critical region, so the no safepoint check is ok.
  MutexLockerEx ml(OsrList_lock, Mutex::_no_safepoint_check_flag);
  nmethod* osr = osr_nmethods_head();
  int found = 0;
  while (osr != NULL) {
    assert(osr->is_osr_method(), "wrong kind of nmethod found in chain");
    if (osr->method() == m) {
      osr->mark_for_deoptimization();
      found++;
    }
    osr = osr->osr_link();
  }
  return found;
}

// Finds an OSR nmethod for m at the given bci (InvocationEntryBci matches any
// bci).  With match_level, only an exact comp_level match is returned;
// otherwise the best nmethod of at least comp_level is returned, or NULL.
nmethod* InstanceKlass::lookup_osr_nmethod(const Method* m, int bci, int comp_level, bool match_level) const {
  // This is a short non-blocking critical region, so the no safepoint check is ok.
  MutexLockerEx ml(OsrList_lock, Mutex::_no_safepoint_check_flag);
  nmethod* osr = osr_nmethods_head();
  nmethod* best = NULL;
  while (osr != NULL) {
    assert(osr->is_osr_method(), "wrong kind of nmethod found in chain");
    // There can be a time when a c1 osr method exists but we are waiting
    // for a c2 version. When c2 completes its osr nmethod we will trash
    // the c1 version and only be able to find the c2 version. However
    // while we overflow in the c1 code at back branches we don't want to
    // try and switch to the same code as we are already running

    if (osr->method() == m &&
        (bci == InvocationEntryBci || osr->osr_entry_bci() == bci)) {
      if (match_level) {
        if (osr->comp_level() == comp_level) {
          // Found a match - return it.
          return osr;
        }
      } else {
        if (best == NULL || (osr->comp_level() > best->comp_level())) {
          if (osr->comp_level() == CompLevel_highest_tier) {
            // Found the best possible - return it.
            return osr;
          }
          best = osr;
        }
      }
    }
    osr = osr->osr_link();
  }
  if (best != NULL && best->comp_level() >= comp_level && match_level == false) {
    return best;
  }
  return NULL;
}

// -----------------------------------------------------------------------------------------------------
// Printing

#ifndef PRODUCT

#define BULLET " - "

static const char* state_names[] = {
  "allocated", "loaded", "linked", "being_initialized", "fully_initialized", "initialization_error"
};

// Prints a vtable/itable-shaped region of 'len' words: Metadata entries are
// printed by value, and small positive entries are annotated as intra-table
// offsets (forward/backward references between slots).
static void print_vtable(address self, intptr_t* start, int len, outputStream* st) {
  ResourceMark rm;
  int* forward_refs = NEW_RESOURCE_ARRAY(int, len);
  for (int i = 0; i < len; i++) forward_refs[i] = 0;
  for (int i = 0; i < len; i++) {
    intptr_t e = start[i];
    st->print("%d : " INTPTR_FORMAT, i, e);
    if (forward_refs[i] != 0) {
      int from = forward_refs[i];
      int off = (int) start[from];
      st->print(" (offset %d <= [%d])", off, from);
    }
    if (e != 0 && ((Metadata*)e)->is_metaspace_object()) {
      st->print(" ");
      ((Metadata*)e)->print_value_on(st);
    } else if (self != NULL && e > 0 && e < 0x10000) {
      // Heuristic: a small positive value is an offset within the table.
      address location = self + e;
      int index = (int)((intptr_t*)location - start);
      st->print(" (offset %d => [%d])", (int)e, index);
      if (index >= 0 && index < len)
        forward_refs[index] = i;
    }
    st->cr();
  }
}

static void print_vtable(vtableEntry* start, int len, outputStream* st) {
  return print_vtable(NULL, reinterpret_cast<intptr_t*>(start), len, st);
}

// Prints an Array<T> of Metadata pointers; with Verbose/WizardMode each
// element is printed on its own line.
template<typename T>
static void print_array_on(outputStream* st, Array<T>* array) {
  if (array == NULL) { st->print_cr("NULL"); return; }
  array->print_value_on(st); st->cr();
  if (Verbose || WizardMode) {
    for (int i = 0; i < array->length(); i++) {
      st->print("%d : ", i);
      array->at(i)->print_value_on(st); st->cr();
    }
  }
}

// Overload for Array<int>: elements are plain integers, not Metadata.
static void print_array_on(outputStream* st, Array<int>* array) {
  if (array == NULL) { st->print_cr("NULL"); return; }
  array->print_value_on(st); st->cr();
  if (Verbose || WizardMode) {
    for (int i = 0; i < array->length(); i++) {
      st->print("%d : %d", i, array->at(i)); st->cr();
    }
  }
}

// Full (debug-build) dump of this klass: sizes, flags, hierarchy, method and
// interface arrays, annotations, vtable/itable and field layout.
void InstanceKlass::print_on(outputStream* st) const {
  assert(is_klass(), "must be klass");
  Klass::print_on(st);

  st->print(BULLET"instance size: %d", size_helper()); st->cr();
  st->print(BULLET"klass size: %d", size()); st->cr();
  st->print(BULLET"access: "); access_flags().print_on(st); st->cr();
  st->print(BULLET"misc flags: 0x%x", _misc_flags); st->cr();
  st->print(BULLET"state: "); st->print_cr("%s", state_names[_init_state]);
  st->print(BULLET"name: "); name()->print_value_on(st); st->cr();
  st->print(BULLET"super: "); Metadata::print_value_on_maybe_null(st, super()); st->cr();
  st->print(BULLET"sub: ");
  Klass* sub = subklass();
  int n;
  // Walk the subclass sibling chain, capping output at MaxSubklassPrintSize.
  for (n = 0; sub != NULL; n++, sub = sub->next_sibling()) {
    if (n < MaxSubklassPrintSize) {
      sub->print_value_on(st);
      st->print(" ");
    }
  }
  if (n >= MaxSubklassPrintSize) st->print("(" INTX_FORMAT " more klasses...)", n - MaxSubklassPrintSize);
  st->cr();

  if (is_interface()) {
    // Compile_lock guards the implementor information.
    MutexLocker ml(Compile_lock);
    st->print_cr(BULLET"nof implementors: %d", nof_implementors());
    if (nof_implementors() == 1) {
      st->print_cr(BULLET"implementor: ");
      st->print(" ");
      implementor()->print_value_on(st);
      st->cr();
    }
  }

  st->print(BULLET"arrays: "); Metadata::print_value_on_maybe_null(st, array_klasses()); st->cr();
  st->print(BULLET"methods: "); print_array_on(st, methods());
  st->print(BULLET"method ordering: "); print_array_on(st, method_ordering());
  st->print(BULLET"default_methods: "); print_array_on(st, default_methods());
  if (default_vtable_indices() != NULL) {
    st->print(BULLET"default vtable indices: "); print_array_on(st, default_vtable_indices());
  }
  st->print(BULLET"local interfaces: "); print_array_on(st, local_interfaces());
  st->print(BULLET"trans. interfaces: "); print_array_on(st, transitive_interfaces());
  st->print(BULLET"constants: "); constants()->print_value_on(st); st->cr();
  if (class_loader_data() != NULL) {
    st->print(BULLET"class loader data: ");
    class_loader_data()->print_value_on(st);
    st->cr();
  }
  st->print(BULLET"host class: "); Metadata::print_value_on_maybe_null(st, host_klass()); st->cr();
  if (source_file_name() != NULL) {
    st->print(BULLET"source file: ");
    source_file_name()->print_value_on(st);
    st->cr();
  }
  if (source_debug_extension() != NULL) {
    st->print(BULLET"source debug extension: ");
    st->print("%s", source_debug_extension());
    st->cr();
  }
  st->print(BULLET"class annotations: "); class_annotations()->print_value_on(st); st->cr();
  st->print(BULLET"class type annotations: "); class_type_annotations()->print_value_on(st); st->cr();
  st->print(BULLET"field annotations: "); fields_annotations()->print_value_on(st); st->cr();
  st->print(BULLET"field type annotations: "); fields_type_annotations()->print_value_on(st); st->cr();
  {
    bool have_pv = false;
    // previous versions are linked together through the InstanceKlass
    for (InstanceKlass* pv_node = previous_versions();
         pv_node != NULL;
         pv_node = pv_node->previous_versions()) {
      if (!have_pv)
        st->print(BULLET"previous version: ");
      have_pv = true;
      pv_node->constants()->print_value_on(st);
    }
    if (have_pv) st->cr();
  }

  if (generic_signature() != NULL) {
    st->print(BULLET"generic signature: ");
    generic_signature()->print_value_on(st);
    st->cr();
  }
  st->print(BULLET"inner classes: "); inner_classes()->print_value_on(st); st->cr();
  st->print(BULLET"nest members: "); nest_members()->print_value_on(st); st->cr();
  if (java_mirror() != NULL) {
    st->print(BULLET"java mirror: ");
    java_mirror()->print_value_on(st);
    st->cr();
  } else {
    st->print_cr(BULLET"java mirror: NULL");
  }
  st->print(BULLET"vtable length %d (start addr: " INTPTR_FORMAT ")", vtable_length(), p2i(start_of_vtable())); st->cr();
  if (vtable_length() > 0 && (Verbose || WizardMode)) print_vtable(start_of_vtable(), vtable_length(), st);
  st->print(BULLET"itable length %d (start addr: " INTPTR_FORMAT ")", itable_length(), p2i(start_of_itable())); st->cr();
  if (itable_length() > 0 && (Verbose || WizardMode)) print_vtable(NULL, start_of_itable(), itable_length(), st);
  st->print_cr(BULLET"---- static fields (%d words):", static_field_size());
  FieldPrinter print_static_field(st);
  ((InstanceKlass*)this)->do_local_static_fields(&print_static_field);
  st->print_cr(BULLET"---- non-static fields (%d words):", nonstatic_field_size());
  FieldPrinter print_nonstatic_field(st);
  InstanceKlass* ik = const_cast<InstanceKlass*>(this);
  ik->do_nonstatic_fields(&print_nonstatic_field);

  st->print(BULLET"non-static oop maps: ");
  OopMapBlock* map = start_of_nonstatic_oop_maps();
  OopMapBlock* end_map = map + nonstatic_oop_map_count();
  while (map < end_map) {
    st->print("%d-%d ", map->offset(), map->offset() + heapOopSize*(map->count() - 1));
    map++;
  }
  st->cr();
}

#endif //PRODUCT

// One-line summary: optional access flags plus the class name.
void InstanceKlass::print_value_on(outputStream* st) const {
  assert(is_klass(), "must be klass");
  if (Verbose || WizardMode) access_flags().print_on(st);
  name()->print_value_on(st);
}

#ifndef PRODUCT

// Field callback used by do_local_static_fields/do_nonstatic_fields: prints
// the descriptor, including the value read from _obj when one was supplied.
void FieldPrinter::do_field(fieldDescriptor* fd) {
  _st->print(BULLET);
  if (_obj == NULL) {
    // No instance: print the field declaration only.
    fd->print_on(_st);
    _st->cr();
  } else {
    fd->print_on_for(_st, _obj);
    _st->cr();
  }
}


// Prints an instance of this class: Klass header info, a special-cased
// summary for well-known classes (String, Class, MethodType), then all
// non-static fields.
void InstanceKlass::oop_print_on(oop obj, outputStream* st) {
  Klass::oop_print_on(obj, st);

  if (this == SystemDictionary::String_klass()) {
    typeArrayOop value = java_lang_String::value(obj);
    juint length = java_lang_String::length(obj);
    // Only print the string when its value array looks well-formed.
    if (value != NULL &&
        value->is_typeArray() &&
        length <= (juint) value->length()) {
      st->print(BULLET"string: ");
      java_lang_String::print(obj, st);
      st->cr();
      if (!WizardMode) return; // that is enough
    }
  }

  st->print_cr(BULLET"---- fields (total size %d words):", oop_size(obj));
  FieldPrinter print_field(st, obj);
  do_nonstatic_fields(&print_field);

  if (this == SystemDictionary::Class_klass()) {
    st->print(BULLET"signature: ");
    java_lang_Class::print_signature(obj, st);
    st->cr();
    Klass* mirrored_klass = java_lang_Class::as_Klass(obj);
    st->print(BULLET"fake entry for mirror: ");
    Metadata::print_value_on_maybe_null(st, mirrored_klass);
    st->cr();
    Klass* array_klass = java_lang_Class::array_klass_acquire(obj);
    st->print(BULLET"fake entry for array: ");
    Metadata::print_value_on_maybe_null(st, array_klass);
    st->cr();
    st->print_cr(BULLET"fake entry for oop_size: %d", java_lang_Class::oop_size(obj));
    st->print_cr(BULLET"fake entry for static_oop_field_count: %d", java_lang_Class::static_oop_field_count(obj));
    Klass* real_klass = java_lang_Class::as_Klass(obj);
    if (real_klass != NULL && real_klass->is_instance_klass()) {
      // A mirror also carries the mirrored class's static fields.
      InstanceKlass::cast(real_klass)->do_local_static_fields(&print_field);
    }
  } else if (this == SystemDictionary::MethodType_klass()) {
    st->print(BULLET"signature: ");
    java_lang_invoke_MethodType::print_signature(obj, st);
    st->cr();
  }
}

// Asserts that 'i' is a valid itable index for this interface; returns true
// so it can be used inside assert expressions.
bool InstanceKlass::verify_itable_index(int i) {
  int method_count = klassItable::method_count_for_interface(this);
  assert(i >= 0 && i < method_count, "index out of bounds");
  return true;
}

#endif //PRODUCT

// Compact one-line description of an instance, specialized for well-known
// classes (String, Class, MethodType, boxes, LambdaForm, MemberName).
void InstanceKlass::oop_print_value_on(oop obj, outputStream* st) {
  st->print("a ");
  name()->print_value_on(st);
  obj->print_address_on(st);
  if (this == SystemDictionary::String_klass()
      && java_lang_String::value(obj) != NULL) {
    ResourceMark rm;
    // Print at most 24 chars; longer strings are truncated to 12 plus length.
    int len = java_lang_String::length(obj);
    int plen = (len < 24 ? len : 12);
    char* str = java_lang_String::as_utf8_string(obj, 0, plen);
    st->print(" = \"%s\"", str);
    if (len > plen)
      st->print("...[%d]", len);
  } else if (this == SystemDictionary::Class_klass()) {
    Klass* k = java_lang_Class::as_Klass(obj);
    st->print(" = ");
    if (k != NULL) {
      k->print_value_on(st);
    } else {
      // Primitive-type mirror: no Klass behind it.
      const char* tname = type2name(java_lang_Class::primitive_type(obj));
tname : "type?"); 3477 } 3478 } else if (this == SystemDictionary::MethodType_klass()) { 3479 st->print(" = "); 3480 java_lang_invoke_MethodType::print_signature(obj, st); 3481 } else if (java_lang_boxing_object::is_instance(obj)) { 3482 st->print(" = "); 3483 java_lang_boxing_object::print(obj, st); 3484 } else if (this == SystemDictionary::LambdaForm_klass()) { 3485 oop vmentry = java_lang_invoke_LambdaForm::vmentry(obj); 3486 if (vmentry != NULL) { 3487 st->print(" => "); 3488 vmentry->print_value_on(st); 3489 } 3490 } else if (this == SystemDictionary::MemberName_klass()) { 3491 Metadata* vmtarget = java_lang_invoke_MemberName::vmtarget(obj); 3492 if (vmtarget != NULL) { 3493 st->print(" = "); 3494 vmtarget->print_value_on(st); 3495 } else { 3496 java_lang_invoke_MemberName::clazz(obj)->print_value_on(st); 3497 st->print("."); 3498 java_lang_invoke_MemberName::name(obj)->print_value_on(st); 3499 } 3500 } 3501 } 3502 3503 const char* InstanceKlass::internal_name() const { 3504 return external_name(); 3505 } 3506 3507 bool InstanceKlass::is_declared_value_type(int index) { 3508 assert(constants()->is_within_bounds(index) && 3509 constants()->tag_at(index).is_klass_or_reference(), "Invalid index"); 3510 return InstanceKlass::is_declared_value_type(value_types(), index); 3511 } 3512 3513 bool InstanceKlass::is_declared_value_type(Array<ValueTypes>* value_types, int index) { 3514 if (value_types == NULL) return false; // No ValueType attribute in this class file 3515 for(int i = 0; i < value_types->length(); i++) { 3516 if (value_types->at(i)._class_info_index == index) { 3517 return true; 3518 } 3519 } 3520 return false; 3521 } 3522 3523 bool InstanceKlass::is_declared_value_type(Symbol* symbol) { 3524 return InstanceKlass::is_declared_value_type(constants(), value_types(), symbol); 3525 } 3526 3527 bool InstanceKlass::is_declared_value_type(ConstantPool* constants, Array<ValueTypes>* value_types, Symbol* symbol) { 3528 assert(symbol != NULL, "Sanity check"); 3529 
  if (value_types == NULL) return false; // No ValueType attribute in this class file
  // Fast path: an entry's cached class-name symbol already matches.
  for(int i = 0; i < value_types->length(); i++) {
    if (value_types->at(i)._class_name == symbol) {
      return true;
    }
  }
  // symbol not found, class name symbol might not have been
  // updated yet
  for(int i = 0; i < value_types->length(); i++) {
    if (constants->klass_at_noresolve((int)value_types->at(i)._class_info_index) == symbol) {
      // Cache the name for subsequent lookups; the entry now holds a new
      // reference to the symbol, so bump its refcount.
      value_types->adr_at(i)->_class_name = symbol;
      symbol->increment_refcount();
      return true;
    }
  }
  return false;
}

// Returns the class name of entry i of the ValueTypes attribute, filling in
// (and refcounting) the cached _class_name from the constant pool on first use.
Symbol* InstanceKlass::get_declared_value_type_name(int i) {
  Array<ValueTypes>* vtypes = value_types();
  assert(i < vtypes->length(), "index out of bound");
  Symbol* sym = vtypes->at(i)._class_name;
  if (sym == NULL) {
    sym = constants()->klass_at_noresolve((int)vtypes->at(i)._class_info_index);
    vtypes->adr_at(i)->_class_name = sym;
    sym->increment_refcount();
  }
  return sym;
}

// Throws IncompatibleClassChangeError if k1 and k2 disagree on whether any
// class named in 'signature' is a declared value type.
void InstanceKlass::check_signature_for_value_types_consistency(Symbol* signature,
                                                                InstanceKlass* k1,
                                                                InstanceKlass* k2, TRAPS) {
  if (signature->utf8_length() == 1) return; // Primitive signature
  // Nothing to check unless at least one side declares value types.
  if (!(k1->has_value_types_attribute() || k2->has_value_types_attribute())) return;
  ResourceMark rm(THREAD);
  for (SignatureStream sstream(signature); !sstream.is_done(); sstream.next()) {
    if (sstream.is_object()) {
      Symbol* sym = sstream.as_symbol(THREAD);
      Symbol* name = sym;
      if (sstream.is_array()) {
        // Skip leading '[' chars to reach the element type descriptor.
        int i=0;
        while (sym->byte_at(i) == '[') i++;
        if (i == sym->utf8_length() - 1 ) continue; // primitive array
        assert(sym->byte_at(i) == 'L', "Must be a L-type");
        // Extract the bare class name between 'L' and ';'.  The lookup
        // returns a refcounted Symbol, released after the checks below.
        name = SymbolTable::lookup(sym->as_C_string() + i + 1,
                                   sym->utf8_length() - 2 - i, CHECK);
      }
      bool opinion1 = k1->is_declared_value_type(name);
      bool opinion2 = k2->is_declared_value_type(name);
      if
(sym != name) name->decrement_refcount(); // release only if lookup created a new Symbol
      if (opinion1 != opinion2) {
        stringStream ss;
        ss.print("signature %s inconsistent value type: %s %s",
                 signature->as_C_string(), k1->external_name(), k2->external_name());
        THROW_MSG(vmSymbols::java_lang_IncompatibleClassChangeError(), ss.as_string());
      }
    }
  }
}

// Throws IncompatibleClassChangeError if k1 and k2 disagree on whether the
// single class or array type named by 'sym' is a declared value type.
void InstanceKlass::check_symbol_for_value_types_consistency(Symbol* sym,
                                                             InstanceKlass* k1,
                                                             InstanceKlass* k2, TRAPS) {
  if (sym->utf8_length() == 1) return; // Primitive signature
  if (!(k1->has_value_types_attribute() || k2->has_value_types_attribute())) return;
  assert(sym->byte_at(0) == 'L' || sym->byte_at(0) == '[', "Sanity check");
  ResourceMark rm(THREAD);
  Symbol* name;
  if (sym->byte_at(0) == 'L') {
    // Plain object type: strip the leading 'L' and trailing ';'.
    name = SymbolTable::lookup(sym->as_C_string() + 1,
                               sym->utf8_length() - 2, CHECK);
  } else {
    // Array type: skip '[' chars, then strip 'L'...';' of the element type.
    int i=0;
    while (sym->byte_at(i) == '[') i++;
    if (i == sym->utf8_length() - 1 ) return; // primitive array
    assert(sym->byte_at(i) == 'L', "Must be a L-type");
    name = SymbolTable::lookup(sym->as_C_string() + i + 1,
                               sym->utf8_length() - 2 - i, CHECK);
  }
  bool opinion1 = k1->is_declared_value_type(name);
  bool opinion2 = k2->is_declared_value_type(name);
  name->decrement_refcount(); // balance the SymbolTable::lookup above
  if (opinion1 != opinion2) {
    stringStream ss;
    ss.print("symbol %s inconsistent value type: %s %s",
             sym->as_C_string(), k1->external_name(), k2->external_name());
    THROW_MSG(vmSymbols::java_lang_IncompatibleClassChangeError(), ss.as_string());
  }
}

// Emits class+load logging (Info level, plus extra detail at Debug level)
// describing this class and where it was loaded from.
void InstanceKlass::print_class_load_logging(ClassLoaderData* loader_data,
                                             const char* module_name,
                                             const ClassFileStream* cfs) const {
  if (!log_is_enabled(Info, class, load)) {
    return;
  }

  ResourceMark rm;
  LogMessage(class, load) msg;
  stringStream info_stream;

  // Name and class hierarchy info
info_stream.print("%s", external_name());

  // Source
  if (cfs != NULL) {
    if (cfs->source() != NULL) {
      if (module_name != NULL) {
        // Classes from the runtime image are reported with a jrt:/ URL.
        if (ClassLoader::is_modules_image(cfs->source())) {
          info_stream.print(" source: jrt:/%s", module_name);
        } else {
          info_stream.print(" source: %s", cfs->source());
        }
      } else {
        info_stream.print(" source: %s", cfs->source());
      }
    } else if (loader_data == ClassLoaderData::the_null_class_loader_data()) {
      // Boot-loader class with no stream source: attribute it to the calling
      // class when one can be determined.
      Thread* THREAD = Thread::current();
      Klass* caller =
        THREAD->is_Java_thread()
        ? ((JavaThread*)THREAD)->security_get_caller_class(1)
        : NULL;
      // caller can be NULL, for example, during a JVMTI VM_Init hook
      if (caller != NULL) {
        info_stream.print(" source: instance of %s", caller->external_name());
      } else {
        // source is unknown
      }
    } else {
      // Non-boot loader: report the loader's class name.
      oop class_loader = loader_data->class_loader();
      info_stream.print(" source: %s", class_loader->klass()->external_name());
    }
  } else {
    info_stream.print(" source: shared objects file");
  }

  msg.info("%s", info_stream.as_string());

  if (log_is_enabled(Debug, class, load)) {
    stringStream debug_stream;

    // Class hierarchy info
    debug_stream.print(" klass: " INTPTR_FORMAT " super: " INTPTR_FORMAT,
                       p2i(this), p2i(superklass()));

    // Interfaces
    if (local_interfaces() != NULL && local_interfaces()->length() > 0) {
      debug_stream.print(" interfaces:");
      int length = local_interfaces()->length();
      for (int i = 0; i < length; i++) {
        debug_stream.print(" " INTPTR_FORMAT,
                           p2i(InstanceKlass::cast(local_interfaces()->at(i))));
      }
    }

    // Class loader
    debug_stream.print(" loader: [");
    loader_data->print_value_on(&debug_stream);
    debug_stream.print("]");

    // Classfile checksum
    if (cfs) {
      debug_stream.print(" bytes: %d checksum: %08x",
                         cfs->length(),
ClassLoader::crc32(0, (const char*)cfs->buffer(),
                                            cfs->length()));
    }

    msg.debug("%s", debug_stream.as_string());
  }
}

#if INCLUDE_SERVICES
// Size Statistics
// Accumulates this klass's memory footprint (instance size, tables, and
// metadata arrays) into 'sz' for heap inspection reporting.
void InstanceKlass::collect_statistics(KlassSizeStats *sz) const {
  Klass::collect_statistics(sz);

  sz->_inst_size = wordSize * size_helper();
  sz->_vtab_bytes = wordSize * vtable_length();
  sz->_itab_bytes = wordSize * itable_length();
  sz->_nonstatic_oopmap_bytes = wordSize * nonstatic_oop_map_size();

  // Sum the read-only metadata arrays while recording each individual size.
  int n = 0;
  n += (sz->_methods_array_bytes = sz->count_array(methods()));
  n += (sz->_method_ordering_bytes = sz->count_array(method_ordering()));
  n += (sz->_local_interfaces_bytes = sz->count_array(local_interfaces()));
  n += (sz->_transitive_interfaces_bytes = sz->count_array(transitive_interfaces()));
  n += (sz->_fields_bytes = sz->count_array(fields()));
  n += (sz->_inner_classes_bytes = sz->count_array(inner_classes()));
  n += (sz->_nest_members_bytes = sz->count_array(nest_members()));
  sz->_ro_bytes += n;

  const ConstantPool* cp = constants();
  if (cp) {
    cp->collect_statistics(sz);
  }

  const Annotations* anno = annotations();
  if (anno) {
    anno->collect_statistics(sz);
  }

  const Array<Method*>* methods_array = methods();
  if (methods()) {
    for (int i = 0; i < methods_array->length(); i++) {
      Method* method = methods_array->at(i);
      if (method) {
        sz->_method_count ++;
        method->collect_statistics(sz);
      }
    }
  }
}
#endif // INCLUDE_SERVICES

// Verification

// Closure that checks every oop field of an object is a valid oop (or NULL);
// used by InstanceKlass::oop_verify_on below.
class VerifyFieldClosure: public BasicOopIterateClosure {
 protected:
  template <class T> void do_oop_work(T* p) {
    oop obj = RawAccess<>::oop_load(p);
    if (!oopDesc::is_oop_or_null(obj)) {
      tty->print_cr("Failed: " PTR_FORMAT " -> " PTR_FORMAT, p2i(p), p2i(obj));
      Universe::print_on(tty);
guarantee(false, "boom");
    }
  }
 public:
  virtual void do_oop(oop* p) { VerifyFieldClosure::do_oop_work(p); }
  virtual void do_oop(narrowOop* p) { VerifyFieldClosure::do_oop_work(p); }
};

// Structural verification of this InstanceKlass: the generic Klass checks,
// then loader linkage, vtable, subclass/sibling links, interface arrays,
// method arrays and ordering, JNI static field ids, and cached fields.
void InstanceKlass::verify_on(outputStream* st) {
#ifndef PRODUCT
  // Avoid redundant verifies, this really should be in product.
  if (_verify_count == Universe::verify_count()) return;
  _verify_count = Universe::verify_count();
#endif

  // Verify Klass
  Klass::verify_on(st);

  // Verify that klass is present in ClassLoaderData
  guarantee(class_loader_data()->contains_klass(this),
            "this class isn't found in class loader data");

  // Verify vtables
  if (is_linked()) {
    // $$$ This used to be done only for m/s collections. Doing it
    // always seemed a valid generalization. (DLD -- 6/00)
    vtable().verify(st);
  }

  // Verify first subklass
  if (subklass() != NULL) {
    guarantee(subklass()->is_klass(), "should be klass");
  }

  // Verify siblings
  Klass* super = this->super();
  Klass* sib = next_sibling();
  if (sib != NULL) {
    if (sib == this) {
      fatal("subclass points to itself " PTR_FORMAT, p2i(sib));
    }

    guarantee(sib->is_klass(), "should be klass");
    guarantee(sib->super() == super, "siblings should have same superklass");
  }

  // Verify implementor fields requires the Compile_lock, but this is sometimes
  // called inside a safepoint, so don't verify.

  // Verify local interfaces
  if (local_interfaces()) {
    Array<InstanceKlass*>* local_interfaces = this->local_interfaces();
    for (int j = 0; j < local_interfaces->length(); j++) {
      InstanceKlass* e = local_interfaces->at(j);
      guarantee(e->is_klass() && e->is_interface(), "invalid local interface");
    }
  }

  // Verify transitive interfaces
  if (transitive_interfaces() != NULL) {
    Array<InstanceKlass*>* transitive_interfaces = this->transitive_interfaces();
    for (int j = 0; j < transitive_interfaces->length(); j++) {
      InstanceKlass* e = transitive_interfaces->at(j);
      guarantee(e->is_klass() && e->is_interface(), "invalid transitive interface");
    }
  }

  // Verify methods
  if (methods() != NULL) {
    Array<Method*>* methods = this->methods();
    for (int j = 0; j < methods->length(); j++) {
      guarantee(methods->at(j)->is_method(), "non-method in methods array");
    }
    // Methods must be sorted by name (fast_compare order) for lookup.
    for (int j = 0; j < methods->length() - 1; j++) {
      Method* m1 = methods->at(j);
      Method* m2 = methods->at(j + 1);
      guarantee(m1->name()->fast_compare(m2->name()) <= 0, "methods not sorted correctly");
    }
  }

  // Verify method ordering
  if (method_ordering() != NULL) {
    Array<int>* method_ordering = this->method_ordering();
    int length = method_ordering->length();
    if (JvmtiExport::can_maintain_original_method_order() ||
        ((UseSharedSpaces || DumpSharedSpaces) && length != 0)) {
      guarantee(length == methods()->length(), "invalid method ordering length");
      // The ordering array must be a permutation of 0..length-1; checking the
      // sum catches duplicated/missing indices cheaply.
      jlong sum = 0;
      for (int j = 0; j < length; j++) {
        int original_index = method_ordering->at(j);
        guarantee(original_index >= 0, "invalid method ordering index");
        guarantee(original_index < length, "invalid method ordering index");
        sum += original_index;
      }
      // Verify sum of indices 0,1,...,length-1
      guarantee(sum == ((jlong)length*(length-1))/2, "invalid method ordering sum");
    } else {
      guarantee(length == 0, "invalid method ordering length");
    }
  }

  // Verify default methods
  if (default_methods() != NULL) {
    Array<Method*>* methods = this->default_methods();
    for (int j = 0; j < methods->length(); j++) {
      guarantee(methods->at(j)->is_method(), "non-method in methods array");
    }
    // Default methods must also be sorted by name.
    for (int j = 0; j < methods->length() - 1; j++) {
      Method* m1 = methods->at(j);
      Method* m2 = methods->at(j + 1);
      guarantee(m1->name()->fast_compare(m2->name()) <= 0, "methods not sorted correctly");
    }
  }

  // Verify JNI static field identifiers
  if (jni_ids() != NULL) {
    jni_ids()->verify(this);
  }

  // Verify other fields
  if (array_klasses() != NULL) {
    guarantee(array_klasses()->is_klass(), "should be klass");
  }
  if (constants() != NULL) {
    guarantee(constants()->is_constantPool(), "should be constant pool");
  }
  const Klass* host = host_klass();
  if (host != NULL) {
    guarantee(host->is_klass(), "should be klass");
  }
}

// Verify an instance of this klass: generic oop checks plus a scan of all
// oop fields via VerifyFieldClosure.
void InstanceKlass::oop_verify_on(oop obj, outputStream* st) {
  Klass::oop_verify_on(obj, st);
  VerifyFieldClosure blk;
  obj->oop_iterate(&blk);
}


// JNIid class for jfieldIDs only
// Note to reviewers:
// These JNI functions are just moved over to column 1 and not changed
// in the compressed oops workspace.
// Construct a JNIid node for a field at 'offset' in 'holder', linked in front
// of 'next' in the holder's singly-linked id list.
JNIid::JNIid(Klass* holder, int offset, JNIid* next) {
  _holder = holder;
  _offset = offset;
  _next = next;
  debug_only(_is_static_field_id = false;)
}


// Walk the list starting at this node; return the entry with the given field
// offset, or NULL if none matches.
JNIid* JNIid::find(int offset) {
  JNIid* current = this;
  while (current != NULL) {
    if (current->offset() == offset) return current;
    current = current->next();
  }
  return NULL;
}

// Delete every node in the list headed by 'current'.
void JNIid::deallocate(JNIid* current) {
  while (current != NULL) {
    JNIid* next = current->next();
    delete current;
    current = next;
  }
}


// Check that every id in this list belongs to 'holder', and (debug only) that
// static field offsets fall inside the holder's static-field block.
void JNIid::verify(Klass* holder) {
  int first_field_offset = InstanceMirrorKlass::offset_of_static_fields();
  int end_field_offset;
  end_field_offset = first_field_offset + (InstanceKlass::cast(holder)->static_field_size() * wordSize);

  JNIid* current = this;
  while (current != NULL) {
    guarantee(current->holder() == holder, "Invalid klass in JNIid");
#ifdef ASSERT
    int o = current->offset();
    if (current->is_static_field_id()) {
      guarantee(o >= first_field_offset && o < end_field_offset, "Invalid static field offset in JNIid");
    }
#endif
    current = current->next();
  }
}

#ifdef ASSERT
// Debug-only setter that enforces forward-only initialization-state
// transitions (shared classes may legitimately re-set an equal state).
void InstanceKlass::set_init_state(ClassState state) {
  bool good_state = is_shared() ? (_init_state <= state)
                                : (_init_state < state);
  assert(good_state || state == allocated, "illegal state transition");
  _init_state = (u1)state;
}
#endif

#if INCLUDE_JVMTI

// RedefineClasses() support for previous versions

// Globally, there is at least one previous version of a class to walk
// during class unloading, which is saved because old methods in the class
// are still running.   Otherwise the previous version list is cleaned up.
bool InstanceKlass::_has_previous_versions = false;

// Returns true if there are previous versions of a class for class
// unloading only. Also resets the flag to false. purge_previous_version
// will set the flag to true if there are any left, i.e., if there's any
// work to do for next time. This is to avoid the expensive code cache
// walk in CLDG::clean_deallocate_lists().
bool InstanceKlass::has_previous_versions_and_reset() {
  bool ret = _has_previous_versions;
  log_trace(redefine, class, iklass, purge)("Class unloading: has_previous_versions = %s",
     ret ? "true" : "false");
  _has_previous_versions = false;
  return ret;
}

// Purge previous versions before adding new previous versions of the class and
// during class unloading.  Unlinks (and queues for deallocation) every
// previous version whose constant pool is no longer on any stack.
void InstanceKlass::purge_previous_version_list() {
  // Safepoint required: the list is mutated and on_stack() bits are only
  // stable at a safepoint.
  assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
  assert(has_been_redefined(), "Should only be called for main class");

  // Quick exit.
  if (previous_versions() == NULL) {
    return;
  }

  // This klass has previous versions so see what we can cleanup
  // while it is safe to do so.

  int deleted_count = 0;    // leave debugging breadcrumbs
  int live_count = 0;
  ClassLoaderData* loader_data = class_loader_data();
  assert(loader_data != NULL, "should never be null");

  ResourceMark rm;
  log_trace(redefine, class, iklass, purge)("%s: previous versions", external_name());

  // previous versions are linked together through the InstanceKlass
  InstanceKlass* pv_node = previous_versions();
  InstanceKlass* last = this;
  int version = 0;

  // check the previous versions list
  for (; pv_node != NULL; ) {

    ConstantPool* pvcp = pv_node->constants();
    assert(pvcp != NULL, "cp ref was unexpectedly cleared");

    if (!pvcp->on_stack()) {
      // If the constant pool isn't on stack, none of the methods
      // are executing.  Unlink this previous_version.
      // The previous version InstanceKlass is on the ClassLoaderData deallocate list
      // so will be deallocated during the next phase of class unloading.
      log_trace(redefine, class, iklass, purge)
        ("previous version " INTPTR_FORMAT " is dead.", p2i(pv_node));
      // For debugging purposes.
      pv_node->set_is_scratch_class();
      // Unlink from previous version list.
      assert(pv_node->class_loader_data() == loader_data, "wrong loader_data");
      InstanceKlass* next = pv_node->previous_versions();
      pv_node->link_previous_versions(NULL);   // point next to NULL
      last->link_previous_versions(next);
      // Add to the deallocate list after unlinking
      loader_data->add_to_deallocate_list(pv_node);
      pv_node = next;
      deleted_count++;
      version++;
      continue;
    } else {
      log_trace(redefine, class, iklass, purge)("previous version " INTPTR_FORMAT " is alive", p2i(pv_node));
      assert(pvcp->pool_holder() != NULL, "Constant pool with no holder");
      guarantee (!loader_data->is_unloading(), "unloaded classes can't be on the stack");
      live_count++;
      // found a previous version for next time we do class unloading
      _has_previous_versions = true;
    }

    // At least one method is live in this previous version.
    // Reset dead EMCP methods not to get breakpoints.
    // All methods are deallocated when all of the methods for this class are no
    // longer running.
    Array<Method*>* method_refs = pv_node->methods();
    if (method_refs != NULL) {
      log_trace(redefine, class, iklass, purge)("previous methods length=%d", method_refs->length());
      for (int j = 0; j < method_refs->length(); j++) {
        Method* method = method_refs->at(j);

        if (!method->on_stack()) {
          // no breakpoints for non-running methods
          if (method->is_running_emcp()) {
            method->set_running_emcp(false);
          }
        } else {
          assert (method->is_obsolete() || method->is_running_emcp(),
                  "emcp method cannot run after emcp bit is cleared");
          log_trace(redefine, class, iklass, purge)
            ("purge: %s(%s): prev method @%d in version @%d is alive",
             method->name()->as_C_string(), method->signature()->as_C_string(), j, version);
        }
      }
    }
    // next previous version
    last = pv_node;
    pv_node = pv_node->previous_versions();
    version++;
  }
  log_trace(redefine, class, iklass, purge)
    ("previous version stats: live=%d, deleted=%d", live_count, deleted_count);
}

// Mark as obsolete any EMCP method further back in the previous-version chain
// that the current redefinition has now superseded.  Only needed when
// 'old_methods' contains a mix of obsolete and EMCP methods.
void InstanceKlass::mark_newly_obsolete_methods(Array<Method*>* old_methods,
                                                int emcp_method_count) {
  int obsolete_method_count = old_methods->length() - emcp_method_count;

  if (emcp_method_count != 0 && obsolete_method_count != 0 &&
      _previous_versions != NULL) {
    // We have a mix of obsolete and EMCP methods so we have to
    // clear out any matching EMCP method entries the hard way.
    int local_count = 0;
    for (int i = 0; i < old_methods->length(); i++) {
      Method* old_method = old_methods->at(i);
      if (old_method->is_obsolete()) {
        // only obsolete methods are interesting
        Symbol* m_name = old_method->name();
        Symbol* m_signature = old_method->signature();

        // previous versions are linked together through the InstanceKlass
        int j = 0;
        for (InstanceKlass* prev_version = _previous_versions;
             prev_version != NULL;
             prev_version = prev_version->previous_versions(), j++) {

          Array<Method*>* method_refs = prev_version->methods();
          for (int k = 0; k < method_refs->length(); k++) {
            Method* method = method_refs->at(k);

            if (!method->is_obsolete() &&
                method->name() == m_name &&
                method->signature() == m_signature) {
              // The current RedefineClasses() call has made all EMCP
              // versions of this method obsolete so mark it as obsolete
              log_trace(redefine, class, iklass, add)
                ("%s(%s): flush obsolete method @%d in version @%d",
                 m_name->as_C_string(), m_signature->as_C_string(), k, j);

              method->set_is_obsolete();
              break;
            }
          }

          // The previous loop may not find a matching EMCP method, but
          // that doesn't mean that we can optimize and not go any
          // further back in the PreviousVersion generations. The EMCP
          // method for this generation could have already been made obsolete,
          // but there still may be an older EMCP method that has not
          // been made obsolete.
        }

        if (++local_count >= obsolete_method_count) {
          // no more obsolete methods so bail out now
          break;
        }
      }
    }
  }
}

// Save the scratch_class as the previous version if any of the methods are running.
// The previous_versions are used to set breakpoints in EMCP methods and they are
// also used to clean MethodData links to redefined methods that are no longer running.
void InstanceKlass::add_previous_version(InstanceKlass* scratch_class,
                                         int emcp_method_count) {
  assert(Thread::current()->is_VM_thread(),
         "only VMThread can add previous versions");

  ResourceMark rm;
  log_trace(redefine, class, iklass, add)
    ("adding previous version ref for %s, EMCP_cnt=%d", scratch_class->external_name(), emcp_method_count);

  // Clean out old previous versions for this class
  purge_previous_version_list();

  // Mark newly obsolete methods in remaining previous versions.  An EMCP method from
  // a previous redefinition may be made obsolete by this redefinition.
  Array<Method*>* old_methods = scratch_class->methods();
  mark_newly_obsolete_methods(old_methods, emcp_method_count);

  // If the constant pool for this previous version of the class
  // is not marked as being on the stack, then none of the methods
  // in this previous version of the class are on the stack so
  // we don't need to add this as a previous version.
  ConstantPool* cp_ref = scratch_class->constants();
  if (!cp_ref->on_stack()) {
    log_trace(redefine, class, iklass, add)("scratch class not added; no methods are running");
    // For debugging purposes.
    scratch_class->set_is_scratch_class();
    scratch_class->class_loader_data()->add_to_deallocate_list(scratch_class);
    return;
  }

  if (emcp_method_count != 0) {
    // At least one method is still running, check for EMCP methods
    for (int i = 0; i < old_methods->length(); i++) {
      Method* old_method = old_methods->at(i);
      if (!old_method->is_obsolete() && old_method->on_stack()) {
        // if EMCP method (not obsolete) is on the stack, mark as EMCP so that
        // we can add breakpoints for it.

        // We set the method->on_stack bit during safepoints for class redefinition
        // and use this bit to set the is_running_emcp bit.
        // After the safepoint, the on_stack bit is cleared and the running emcp
        // method may exit.   If so, we would set a breakpoint in a method that
        // is never reached, but this won't be noticeable to the programmer.
        old_method->set_running_emcp(true);
        log_trace(redefine, class, iklass, add)
          ("EMCP method %s is on_stack " INTPTR_FORMAT, old_method->name_and_sig_as_C_string(), p2i(old_method));
      } else if (!old_method->is_obsolete()) {
        log_trace(redefine, class, iklass, add)
          ("EMCP method %s is NOT on_stack " INTPTR_FORMAT, old_method->name_and_sig_as_C_string(), p2i(old_method));
      }
    }
  }

  // Add previous version if any methods are still running.
  // Set has_previous_version flag for processing during class unloading.
  _has_previous_versions = true;
  log_trace(redefine, class, iklass, add) ("scratch class added; one of its methods is on_stack.");
  assert(scratch_class->previous_versions() == NULL, "shouldn't have a previous version");
  // Link the scratch class at the head of this class's previous-version chain.
  scratch_class->link_previous_versions(previous_versions());
  link_previous_versions(scratch_class);
} // end add_previous_version()

#endif // INCLUDE_JVMTI

// Find the method whose method_idnum() is 'idnum'.  Fast path: the method is
// usually stored at array index idnum; otherwise fall back to a linear scan.
Method* InstanceKlass::method_with_idnum(int idnum) {
  Method* m = NULL;
  if (idnum < methods()->length()) {
    m = methods()->at(idnum);
  }
  if (m == NULL || m->method_idnum() != idnum) {
    for (int index = 0; index < methods()->length(); ++index) {
      m = methods()->at(index);
      if (m->method_idnum() == idnum) {
        return m;
      }
    }
    // None found, return null for the caller to handle.
    return NULL;
  }
  return m;
}


// Find the method whose original (pre-redefinition) idnum is 'idnum'; same
// fast-path/linear-scan structure as method_with_idnum above.
Method* InstanceKlass::method_with_orig_idnum(int idnum) {
  if (idnum >= methods()->length()) {
    return NULL;
  }
  Method* m = methods()->at(idnum);
  if (m != NULL && m->orig_method_idnum() == idnum) {
    return m;
  }
  // Obsolete method idnum does not match the original idnum
  for (int index = 0; index < methods()->length(); ++index) {
    m = methods()->at(index);
    if (m->orig_method_idnum() == idnum) {
      return m;
    }
  }
  // None found, return null for the caller to handle.
  return NULL;
}


// Find the method with original idnum 'idnum' in the given redefinition
// version of this class, or NULL if that version no longer exists.
Method* InstanceKlass::method_with_orig_idnum(int idnum, int version) {
  InstanceKlass* holder = get_klass_version(version);
  if (holder == NULL) {
    return NULL; // The version of klass is gone, no method is found
  }
  Method* method = holder->method_with_orig_idnum(idnum);
  return method;
}

#if INCLUDE_JVMTI
// Returns the cached class file bytes kept for retransformation, or NULL if
// only archived (shared metaspace) stream data is present.
JvmtiCachedClassFileData* InstanceKlass::get_cached_class_file() {
  if (MetaspaceShared::is_in_shared_metaspace(_cached_class_file)) {
    // Ignore the archived class stream data
    return NULL;
  } else {
    return _cached_class_file;
  }
}

jint InstanceKlass::get_cached_class_file_len() {
  return VM_RedefineClasses::get_cached_class_file_len(_cached_class_file);
}

unsigned char * InstanceKlass::get_cached_class_file_bytes() {
  return VM_RedefineClasses::get_cached_class_file_bytes(_cached_class_file);
}

#if INCLUDE_CDS
// Returns the class data to archive at CDS dump time; at run time, returns
// the archived data of a shared class (NULL if not in shared metaspace).
JvmtiCachedClassFileData* InstanceKlass::get_archived_class_data() {
  if (DumpSharedSpaces) {
    return _cached_class_file;
  } else {
    assert(this->is_shared(), "class should be shared");
    if (MetaspaceShared::is_in_shared_metaspace(_cached_class_file)) {
      return _cached_class_file;
    } else {
      return NULL;
    }
  }
}
#endif
#endif

// Throws IncompatibleClassChangeError describing this ValueCapableClass and
// then returns from the enclosing (TRAPS) function.
#define THROW_DVT_ERROR(s) \
  Exceptions::fthrow(THREAD_AND_LOCATION, vmSymbols::java_lang_IncompatibleClassChangeError(), \
      "ValueCapableClass class '%s' %s", external_name(),(s)); \
  return