9 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
10 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
11 * version 2 for more details (a copy is included in the LICENSE file that
12 * accompanied this code).
13 *
14 * You should have received a copy of the GNU General Public License version
15 * 2 along with this work; if not, write to the Free Software Foundation,
16 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
17 *
18 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
19 * or visit www.oracle.com if you need additional information or have any
20 * questions.
21 *
22 */
23
24 #include "precompiled.hpp"
25 #include "code/compiledIC.hpp"
26 #include "code/compiledMethod.inline.hpp"
27 #include "code/scopeDesc.hpp"
28 #include "code/codeCache.hpp"
29 #include "gc/shared/barrierSet.hpp"
30 #include "gc/shared/gcBehaviours.hpp"
31 #include "interpreter/bytecode.inline.hpp"
32 #include "logging/log.hpp"
33 #include "logging/logTag.hpp"
34 #include "memory/resourceArea.hpp"
35 #include "oops/methodData.hpp"
36 #include "oops/method.inline.hpp"
37 #include "prims/methodHandles.hpp"
38 #include "runtime/handles.inline.hpp"
39 #include "runtime/mutexLocker.hpp"
40
41 CompiledMethod::CompiledMethod(Method* method, const char* name, CompilerType type, const CodeBlobLayout& layout,
42 int frame_complete_offset, int frame_size, ImmutableOopMapSet* oop_maps,
43 bool caller_must_gc_arguments)
44 : CodeBlob(name, type, layout, frame_complete_offset, frame_size, oop_maps, caller_must_gc_arguments),
45 _mark_for_deoptimization_status(not_marked),
46 _method(method),
47 _gc_data(NULL)
|
9 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
10 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
11 * version 2 for more details (a copy is included in the LICENSE file that
12 * accompanied this code).
13 *
14 * You should have received a copy of the GNU General Public License version
15 * 2 along with this work; if not, write to the Free Software Foundation,
16 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
17 *
18 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
19 * or visit www.oracle.com if you need additional information or have any
20 * questions.
21 *
22 */
23
24 #include "precompiled.hpp"
25 #include "code/compiledIC.hpp"
26 #include "code/compiledMethod.inline.hpp"
27 #include "code/scopeDesc.hpp"
28 #include "code/codeCache.hpp"
29 #include "code/icBuffer.hpp"
30 #include "gc/shared/barrierSet.hpp"
31 #include "gc/shared/gcBehaviours.hpp"
32 #include "interpreter/bytecode.inline.hpp"
33 #include "logging/log.hpp"
34 #include "logging/logTag.hpp"
35 #include "memory/resourceArea.hpp"
36 #include "oops/methodData.hpp"
37 #include "oops/method.inline.hpp"
38 #include "prims/methodHandles.hpp"
39 #include "runtime/handles.inline.hpp"
40 #include "runtime/mutexLocker.hpp"
41
42 CompiledMethod::CompiledMethod(Method* method, const char* name, CompilerType type, const CodeBlobLayout& layout,
43 int frame_complete_offset, int frame_size, ImmutableOopMapSet* oop_maps,
44 bool caller_must_gc_arguments)
45 : CodeBlob(name, type, layout, frame_complete_offset, frame_size, oop_maps, caller_must_gc_arguments),
46 _mark_for_deoptimization_status(not_marked),
47 _method(method),
48 _gc_data(NULL)
|
412
#ifdef ASSERT
// Debug-only callback for metadata_do(): asserts that the class loader
// owning this piece of metadata is still alive.
static void check_class(Metadata* md) {
  // Resolve the Klass that anchors this metadata's class loader.
  Klass* holder = NULL;
  if (md->is_klass()) {
    holder = (Klass*)md;
  } else if (md->is_method()) {
    holder = ((Method*)md)->method_holder();
  } else if (md->is_methodData()) {
    holder = ((MethodData*)md)->method()->method_holder();
  } else {
    // Unexpected metadata kind - dump it and fail.
    md->print();
    ShouldNotReachHere();
  }
  assert(holder->is_loader_alive(), "must be alive");
}
#endif // ASSERT
430
431
432 void CompiledMethod::clean_ic_if_metadata_is_dead(CompiledIC *ic) {
433 if (ic->is_icholder_call()) {
434 // The only exception is compiledICHolder metdata which may
435 // yet be marked below. (We check this further below).
436 CompiledICHolder* cichk_metdata = ic->cached_icholder();
437
438 if (cichk_metdata->is_loader_alive()) {
439 return;
440 }
441 } else {
442 Metadata* ic_metdata = ic->cached_metadata();
443 if (ic_metdata != NULL) {
444 if (ic_metdata->is_klass()) {
445 if (((Klass*)ic_metdata)->is_loader_alive()) {
446 return;
447 }
448 } else if (ic_metdata->is_method()) {
449 Method* method = (Method*)ic_metdata;
450 assert(!method->is_old(), "old method should have been cleaned");
451 if (method->method_holder()->is_loader_alive()) {
452 return;
453 }
454 } else {
455 ShouldNotReachHere();
456 }
457 }
458 }
459
460 ic->set_to_clean();
461 }
462
463 // static_stub_Relocations may have dangling references to
464 // nmethods so trim them out here. Otherwise it looks like
465 // compiled code is maintaining a link to dead metadata.
466 void CompiledMethod::clean_ic_stubs() {
467 #ifdef ASSERT
468 address low_boundary = oops_reloc_begin();
469 RelocIterator iter(this, low_boundary);
470 while (iter.next()) {
471 address static_call_addr = NULL;
472 if (iter.type() == relocInfo::opt_virtual_call_type) {
473 CompiledIC* cic = CompiledIC_at(&iter);
474 if (!cic->is_call_to_interpreted()) {
475 static_call_addr = iter.addr();
476 }
477 } else if (iter.type() == relocInfo::static_call_type) {
478 CompiledStaticCall* csc = compiledStaticCall_at(iter.reloc());
479 if (!csc->is_call_to_interpreted()) {
480 static_call_addr = iter.addr();
481 }
482 }
483 if (static_call_addr != NULL) {
484 RelocIterator sciter(this, low_boundary);
485 while (sciter.next()) {
486 if (sciter.type() == relocInfo::static_stub_type &&
487 sciter.static_stub_reloc()->static_call() == static_call_addr) {
488 sciter.static_stub_reloc()->clear_inline_cache();
489 }
490 }
491 }
492 }
493 #endif
494 }
495
496 // Clean references to unloaded nmethods at addr from this one, which is not unloaded.
497 template <class CompiledICorStaticCall>
498 static void clean_if_nmethod_is_unloaded(CompiledICorStaticCall *ic, address addr, CompiledMethod* from,
499 bool clean_all) {
500 // Ok, to lookup references to zombies here
501 CodeBlob *cb = CodeCache::find_blob_unsafe(addr);
502 CompiledMethod* nm = (cb != NULL) ? cb->as_compiled_method_or_null() : NULL;
503 if (nm != NULL) {
504 // Clean inline caches pointing to both zombie and not_entrant methods
505 if (clean_all || !nm->is_in_use() || nm->is_unloading() || (nm->method()->code() != nm)) {
506 ic->set_to_clean(from->is_alive());
507 assert(ic->is_clean(), "nmethod " PTR_FORMAT "not clean %s", p2i(from), from->method()->name_and_sig_as_C_string());
508 }
509 }
510 }
511
512 static void clean_if_nmethod_is_unloaded(CompiledIC *ic, CompiledMethod* from,
513 bool clean_all) {
514 clean_if_nmethod_is_unloaded(ic, ic->ic_destination(), from, clean_all);
515 }
516
517 static void clean_if_nmethod_is_unloaded(CompiledStaticCall *csc, CompiledMethod* from,
518 bool clean_all) {
519 clean_if_nmethod_is_unloaded(csc, csc->destination(), from, clean_all);
520 }
521
// Cleans caches in nmethods that point to either classes that are unloaded
// or nmethods that are unloaded.
//
// Can be called either in parallel by G1 currently or after all
// nmethods are unloaded.
void CompiledMethod::unload_nmethod_caches(bool unloading_occurred) {
  ResourceMark rm;

  // Exception cache only needs to be called if unloading occurred
  if (unloading_occurred) {
    clean_exception_cache();
  }

  // Clear inline caches that refer to unloaded classes, or to nmethods that
  // are non-entrant, zombie or unloaded.
  cleanup_inline_caches_impl(unloading_occurred, false);

  // All static stubs need to be cleaned.
  clean_ic_stubs();

  // Check that the metadata embedded in the nmethod is alive
  DEBUG_ONLY(metadata_do(check_class));
}
545
546 // Called to clean up after class unloading for live nmethods and from the sweeper
547 // for all methods.
548 void CompiledMethod::cleanup_inline_caches_impl(bool unloading_occurred, bool clean_all) {
549 assert(CompiledICLocker::is_safe(this), "mt unsafe call");
550 ResourceMark rm;
551
552 // Find all calls in an nmethod and clear the ones that point to non-entrant,
553 // zombie and unloaded nmethods.
554 RelocIterator iter(this, oops_reloc_begin());
555 while(iter.next()) {
556
557 switch (iter.type()) {
558
559 case relocInfo::virtual_call_type:
560 if (unloading_occurred) {
561 // If class unloading occurred we first clear ICs where the cached metadata
562 // is referring to an unloaded klass or method.
563 clean_ic_if_metadata_is_dead(CompiledIC_at(&iter));
564 }
565
566 clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), this, clean_all);
567 break;
568
569 case relocInfo::opt_virtual_call_type:
570 clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), this, clean_all);
571 break;
572
573 case relocInfo::static_call_type:
574 clean_if_nmethod_is_unloaded(compiledStaticCall_at(iter.reloc()), this, clean_all);
575 break;
576
577 case relocInfo::oop_type:
578 break;
579
580 case relocInfo::metadata_type:
581 break; // nothing to do.
582
583 default:
584 break;
585 }
586 }
587 }
588
589 // Iterating over all nmethods, e.g. with the help of CodeCache::nmethods_do(fun) was found
590 // to not be inherently safe. There is a chance that fields are seen which are not properly
591 // initialized. This happens despite the fact that nmethods_do() asserts the CodeCache_lock
592 // to be held.
593 // To bundle knowledge about necessary checks in one place, this function was introduced.
594 // It is not claimed that these checks are sufficient, but they were found to be necessary.
595 bool CompiledMethod::nmethod_access_is_safe(nmethod* nm) {
596 Method* method = (nm == NULL) ? NULL : nm->method(); // nm->method() may be uninitialized, i.e. != NULL, but invalid
597 return (nm != NULL) && (method != NULL) && (method->signature() != NULL) &&
598 !nm->is_zombie() && !nm->is_not_installed() &&
599 os::is_readable_pointer(method) &&
600 os::is_readable_pointer(method->constants()) &&
601 os::is_readable_pointer(method->signature());
602 }
|
413
#ifdef ASSERT
// Debug-only callback for metadata_do(): asserts that the class loader
// owning this piece of metadata is still alive.
static void check_class(Metadata* md) {
  // Resolve the Klass that anchors this metadata's class loader.
  Klass* holder = NULL;
  if (md->is_klass()) {
    holder = (Klass*)md;
  } else if (md->is_method()) {
    holder = ((Method*)md)->method_holder();
  } else if (md->is_methodData()) {
    holder = ((MethodData*)md)->method()->method_holder();
  } else {
    // Unexpected metadata kind - dump it and fail.
    md->print();
    ShouldNotReachHere();
  }
  assert(holder->is_loader_alive(), "must be alive");
}
#endif // ASSERT
431
432
433 bool CompiledMethod::clean_ic_if_metadata_is_dead(CompiledIC *ic) {
434 if (ic->is_icholder_call()) {
435 // The only exception is compiledICHolder metdata which may
436 // yet be marked below. (We check this further below).
437 CompiledICHolder* cichk_metdata = ic->cached_icholder();
438
439 if (cichk_metdata->is_loader_alive()) {
440 return true;
441 }
442 } else {
443 Metadata* ic_metdata = ic->cached_metadata();
444 if (ic_metdata != NULL) {
445 if (ic_metdata->is_klass()) {
446 if (((Klass*)ic_metdata)->is_loader_alive()) {
447 return true;
448 }
449 } else if (ic_metdata->is_method()) {
450 Method* method = (Method*)ic_metdata;
451 assert(!method->is_old(), "old method should have been cleaned");
452 if (method->method_holder()->is_loader_alive()) {
453 return true;
454 }
455 } else {
456 ShouldNotReachHere();
457 }
458 }
459 }
460
461 return ic->set_to_clean();
462 }
463
464 // static_stub_Relocations may have dangling references to
465 // nmethods so trim them out here. Otherwise it looks like
466 // compiled code is maintaining a link to dead metadata.
467 void CompiledMethod::clean_ic_stubs() {
468 #ifdef ASSERT
469 address low_boundary = oops_reloc_begin();
470 RelocIterator iter(this, low_boundary);
471 while (iter.next()) {
472 address static_call_addr = NULL;
473 if (iter.type() == relocInfo::opt_virtual_call_type) {
474 CompiledIC* cic = CompiledIC_at(&iter);
475 if (!cic->is_call_to_interpreted()) {
476 static_call_addr = iter.addr();
477 }
478 } else if (iter.type() == relocInfo::static_call_type) {
479 CompiledStaticCall* csc = compiledStaticCall_at(iter.reloc());
480 if (!csc->is_call_to_interpreted()) {
481 static_call_addr = iter.addr();
482 }
483 }
484 if (static_call_addr != NULL) {
485 RelocIterator sciter(this, low_boundary);
486 while (sciter.next()) {
487 if (sciter.type() == relocInfo::static_stub_type &&
488 sciter.static_stub_reloc()->static_call() == static_call_addr) {
489 sciter.static_stub_reloc()->clear_inline_cache();
490 }
491 }
492 }
493 }
494 #endif
495 }
496
497 // Clean references to unloaded nmethods at addr from this one, which is not unloaded.
498 template <class CompiledICorStaticCall>
499 static bool clean_if_nmethod_is_unloaded(CompiledICorStaticCall *ic, address addr, CompiledMethod* from,
500 bool clean_all) {
501 // Ok, to lookup references to zombies here
502 CodeBlob *cb = CodeCache::find_blob_unsafe(addr);
503 CompiledMethod* nm = (cb != NULL) ? cb->as_compiled_method_or_null() : NULL;
504 if (nm != NULL) {
505 // Clean inline caches pointing to both zombie and not_entrant methods
506 if (clean_all || !nm->is_in_use() || nm->is_unloading() || (nm->method()->code() != nm)) {
507 if (!ic->set_to_clean(from->is_alive())) {
508 return false;
509 }
510 assert(ic->is_clean(), "nmethod " PTR_FORMAT "not clean %s", p2i(from), from->method()->name_and_sig_as_C_string());
511 }
512 }
513 return true;
514 }
515
516 static bool clean_if_nmethod_is_unloaded(CompiledIC *ic, CompiledMethod* from,
517 bool clean_all) {
518 return clean_if_nmethod_is_unloaded(ic, ic->ic_destination(), from, clean_all);
519 }
520
521 static bool clean_if_nmethod_is_unloaded(CompiledStaticCall *csc, CompiledMethod* from,
522 bool clean_all) {
523 return clean_if_nmethod_is_unloaded(csc, csc->destination(), from, clean_all);
524 }
525
// Cleans caches in nmethods that point to either classes that are unloaded
// or nmethods that are unloaded.
//
// Can be called either in parallel by G1 currently or after all
// nmethods are unloaded.  Returns false when cleaning could not complete
// (e.g. the inline cache buffer ran out of stubs) and must be called
// again after the caller refills the IC buffer.
// Returns false if inline-cache cleaning could not complete; the caller is
// expected to refill the inline cache buffer and call again (see
// cleanup_inline_caches() for the retry loop).
bool CompiledMethod::unload_nmethod_caches(bool unloading_occurred) {
  ResourceMark rm;

  // Exception cache only needs to be called if unloading occurred
  if (unloading_occurred) {
    clean_exception_cache();
  }

  if (!cleanup_inline_caches_impl(unloading_occurred, false)) {
    // An IC could not be cleaned - propagate the failure so the caller can retry.
    return false;
  }

  // All static stubs need to be cleaned.
  clean_ic_stubs();

  // Check that the metadata embedded in the nmethod is alive
  DEBUG_ONLY(metadata_do(check_class));
  return true;
}
552
// Clean this method's inline caches, retrying until cleaning succeeds.
// cleanup_inline_caches_impl() returns false when an IC cannot be
// transitioned (presumably because IC stub space ran out - see
// InlineCacheBuffer::refill_ic_stubs() below); in that case we refill
// the stub buffer and try again.
void CompiledMethod::cleanup_inline_caches(bool clean_all) {
  for (;;) {
    { CompiledICLocker ic_locker(this);
      // All inline caches cleaned - done.
      if (cleanup_inline_caches_impl(false, clean_all)) {
        return;
      }
    }
    // The inner scope ensures the CompiledICLocker is released before the
    // IC stub buffer is refilled.
    InlineCacheBuffer::refill_ic_stubs();
  }
}
563
// Called to clean up after class unloading for live nmethods and from the sweeper
// for all methods.
// Visits every call relocation and clears inline caches that point to
// non-entrant, zombie or unloaded nmethods.  Returns false as soon as an IC
// cannot be cleaned (the caller must refill IC stubs and retry).
bool CompiledMethod::cleanup_inline_caches_impl(bool unloading_occurred, bool clean_all) {
  assert(CompiledICLocker::is_safe(this), "mt unsafe call");
  ResourceMark rm;

  // Find all calls in an nmethod and clear the ones that point to non-entrant,
  // zombie and unloaded nmethods.
  RelocIterator iter(this, oops_reloc_begin());
  while(iter.next()) {

    switch (iter.type()) {

    case relocInfo::virtual_call_type:
      if (unloading_occurred) {
        // If class unloading occurred we first clear ICs where the cached metadata
        // is referring to an unloaded klass or method.
        if (!clean_ic_if_metadata_is_dead(CompiledIC_at(&iter))) {
          return false;
        }
      }

      if (!clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), this, clean_all)) {
        return false;
      }
      break;

    case relocInfo::opt_virtual_call_type:
      if (!clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), this, clean_all)) {
        return false;
      }
      break;

    case relocInfo::static_call_type:
      if (!clean_if_nmethod_is_unloaded(compiledStaticCall_at(iter.reloc()), this, clean_all)) {
        return false;
      }
      break;

    // Other relocation types (oops, metadata, ...) need no IC cleaning.
    default:
      break;
    }
  }

  return true;
}
610
611 // Iterating over all nmethods, e.g. with the help of CodeCache::nmethods_do(fun) was found
612 // to not be inherently safe. There is a chance that fields are seen which are not properly
613 // initialized. This happens despite the fact that nmethods_do() asserts the CodeCache_lock
614 // to be held.
615 // To bundle knowledge about necessary checks in one place, this function was introduced.
616 // It is not claimed that these checks are sufficient, but they were found to be necessary.
617 bool CompiledMethod::nmethod_access_is_safe(nmethod* nm) {
618 Method* method = (nm == NULL) ? NULL : nm->method(); // nm->method() may be uninitialized, i.e. != NULL, but invalid
619 return (nm != NULL) && (method != NULL) && (method->signature() != NULL) &&
620 !nm->is_zombie() && !nm->is_not_installed() &&
621 os::is_readable_pointer(method) &&
622 os::is_readable_pointer(method->constants()) &&
623 os::is_readable_pointer(method->signature());
624 }
|