9 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
10 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
11 * version 2 for more details (a copy is included in the LICENSE file that
12 * accompanied this code).
13 *
14 * You should have received a copy of the GNU General Public License version
15 * 2 along with this work; if not, write to the Free Software Foundation,
16 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
17 *
18 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
19 * or visit www.oracle.com if you need additional information or have any
20 * questions.
21 *
22 */
23
24 #include "precompiled.hpp"
25 #include "code/compiledIC.hpp"
26 #include "code/compiledMethod.inline.hpp"
27 #include "code/scopeDesc.hpp"
28 #include "code/codeCache.hpp"
29 #include "gc/shared/barrierSet.hpp"
30 #include "gc/shared/gcBehaviours.hpp"
31 #include "interpreter/bytecode.inline.hpp"
32 #include "logging/log.hpp"
33 #include "logging/logTag.hpp"
34 #include "memory/resourceArea.hpp"
35 #include "oops/methodData.hpp"
36 #include "oops/method.inline.hpp"
37 #include "prims/methodHandles.hpp"
38 #include "runtime/handles.inline.hpp"
39 #include "runtime/mutexLocker.hpp"
40
41 CompiledMethod::CompiledMethod(Method* method, const char* name, CompilerType type, const CodeBlobLayout& layout,
42 int frame_complete_offset, int frame_size, ImmutableOopMapSet* oop_maps,
43 bool caller_must_gc_arguments)
44 : CodeBlob(name, type, layout, frame_complete_offset, frame_size, oop_maps, caller_must_gc_arguments),
45 _mark_for_deoptimization_status(not_marked),
46 _method(method),
47 _gc_data(NULL)
|
9 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
10 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
11 * version 2 for more details (a copy is included in the LICENSE file that
12 * accompanied this code).
13 *
14 * You should have received a copy of the GNU General Public License version
15 * 2 along with this work; if not, write to the Free Software Foundation,
16 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
17 *
18 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
19 * or visit www.oracle.com if you need additional information or have any
20 * questions.
21 *
22 */
23
24 #include "precompiled.hpp"
25 #include "code/compiledIC.hpp"
26 #include "code/compiledMethod.inline.hpp"
27 #include "code/scopeDesc.hpp"
28 #include "code/codeCache.hpp"
29 #include "code/icBuffer.hpp"
30 #include "gc/shared/barrierSet.hpp"
31 #include "gc/shared/gcBehaviours.hpp"
32 #include "interpreter/bytecode.inline.hpp"
33 #include "logging/log.hpp"
34 #include "logging/logTag.hpp"
35 #include "memory/resourceArea.hpp"
36 #include "oops/methodData.hpp"
37 #include "oops/method.inline.hpp"
38 #include "prims/methodHandles.hpp"
39 #include "runtime/handles.inline.hpp"
40 #include "runtime/mutexLocker.hpp"
41
42 CompiledMethod::CompiledMethod(Method* method, const char* name, CompilerType type, const CodeBlobLayout& layout,
43 int frame_complete_offset, int frame_size, ImmutableOopMapSet* oop_maps,
44 bool caller_must_gc_arguments)
45 : CodeBlob(name, type, layout, frame_complete_offset, frame_size, oop_maps, caller_must_gc_arguments),
46 _mark_for_deoptimization_status(not_marked),
47 _method(method),
48 _gc_data(NULL)
|
412
#ifdef ASSERT
// Debug-only helper: verify that the class loader owning the given piece of
// metadata is still alive. Aborts (ShouldNotReachHere) on unknown metadata kinds.
static void check_class(Metadata* md) {
  Klass* holder = NULL;
  if (md->is_klass()) {
    holder = (Klass*)md;
  } else if (md->is_method()) {
    holder = ((Method*)md)->method_holder();
  } else if (md->is_methodData()) {
    holder = ((MethodData*)md)->method()->method_holder();
  } else {
    // Unexpected metadata kind: dump it for diagnosis, then fail.
    md->print();
    ShouldNotReachHere();
  }
  assert(holder->is_loader_alive(), "must be alive");
}
#endif // ASSERT
430
431
432 void CompiledMethod::clean_ic_if_metadata_is_dead(CompiledIC *ic) {
433 if (ic->is_icholder_call()) {
434 // The only exception is compiledICHolder metdata which may
435 // yet be marked below. (We check this further below).
436 CompiledICHolder* cichk_metdata = ic->cached_icholder();
437
438 if (cichk_metdata->is_loader_alive()) {
439 return;
440 }
441 } else {
442 Metadata* ic_metdata = ic->cached_metadata();
443 if (ic_metdata != NULL) {
444 if (ic_metdata->is_klass()) {
445 if (((Klass*)ic_metdata)->is_loader_alive()) {
446 return;
447 }
448 } else if (ic_metdata->is_method()) {
449 Method* method = (Method*)ic_metdata;
450 assert(!method->is_old(), "old method should have been cleaned");
451 if (method->method_holder()->is_loader_alive()) {
452 return;
453 }
454 } else {
455 ShouldNotReachHere();
456 }
457 }
458 }
459
460 ic->set_to_clean();
461 }
462
// static_stub_Relocations may have dangling references to
// nmethods so trim them out here. Otherwise it looks like
// compiled code is maintaining a link to dead metadata.
void CompiledMethod::clean_ic_stubs() {
#ifdef ASSERT
  // NOTE: the entire body is compiled only in debug builds — in product
  // builds this function is a no-op. The cleaning here exists so that the
  // metadata-liveness asserts elsewhere (e.g. check_class) do not trip over
  // stale static stubs.
  address low_boundary = oops_reloc_begin();
  RelocIterator iter(this, low_boundary);
  while (iter.next()) {
    // First pass: find call sites that no longer go through the interpreter;
    // remember the call address so the paired static stub can be cleared.
    address static_call_addr = NULL;
    if (iter.type() == relocInfo::opt_virtual_call_type) {
      CompiledIC* cic = CompiledIC_at(&iter);
      if (!cic->is_call_to_interpreted()) {
        static_call_addr = iter.addr();
      }
    } else if (iter.type() == relocInfo::static_call_type) {
      CompiledStaticCall* csc = compiledStaticCall_at(iter.reloc());
      if (!csc->is_call_to_interpreted()) {
        static_call_addr = iter.addr();
      }
    }
    if (static_call_addr != NULL) {
      // Second pass (nested scan): locate the static stub(s) whose recorded
      // call site matches, and clear their inline-cache state. Quadratic in
      // the number of relocations, but debug-only.
      RelocIterator sciter(this, low_boundary);
      while (sciter.next()) {
        if (sciter.type() == relocInfo::static_stub_type &&
            sciter.static_stub_reloc()->static_call() == static_call_addr) {
          sciter.static_stub_reloc()->clear_inline_cache();
        }
      }
    }
  }
#endif
}
495
496 // Clean references to unloaded nmethods at addr from this one, which is not unloaded.
497 template <class CompiledICorStaticCall>
498 static void clean_if_nmethod_is_unloaded(CompiledICorStaticCall *ic, address addr, CompiledMethod* from,
499 bool clean_all) {
500 // Ok, to lookup references to zombies here
501 CodeBlob *cb = CodeCache::find_blob_unsafe(addr);
502 CompiledMethod* nm = (cb != NULL) ? cb->as_compiled_method_or_null() : NULL;
503 if (nm != NULL) {
504 // Clean inline caches pointing to both zombie and not_entrant methods
505 if (clean_all || !nm->is_in_use() || nm->is_unloading() || (nm->method()->code() != nm)) {
506 ic->set_to_clean(from->is_alive());
507 assert(ic->is_clean(), "nmethod " PTR_FORMAT "not clean %s", p2i(from), from->method()->name_and_sig_as_C_string());
508 }
509 }
510 }
511
512 static void clean_if_nmethod_is_unloaded(CompiledIC *ic, CompiledMethod* from,
513 bool clean_all) {
514 clean_if_nmethod_is_unloaded(ic, ic->ic_destination(), from, clean_all);
515 }
516
517 static void clean_if_nmethod_is_unloaded(CompiledStaticCall *csc, CompiledMethod* from,
518 bool clean_all) {
519 clean_if_nmethod_is_unloaded(csc, csc->destination(), from, clean_all);
520 }
521
// Cleans caches in nmethods that point to either classes that are unloaded
// or nmethods that are unloaded.
//
// Can be called either in parallel by G1 currently or after all
// nmethods are unloaded. Return postponed=true in the parallel case for
// inline caches found that point to nmethods that are not yet visited during
// the do_unloading walk.
void CompiledMethod::unload_nmethod_caches(bool unloading_occurred) {
  ResourceMark rm;

  // Exception cache only needs to be called if unloading occurred
  if (unloading_occurred) {
    clean_exception_cache();
  }

  // Clear inline caches referring to dead metadata or to unloaded /
  // non-entrant target code (never postponing: clean_all = false).
  cleanup_inline_caches_impl(unloading_occurred, false);

  // All static stubs need to be cleaned.
  clean_ic_stubs();

  // Check that the metadata embedded in the nmethod is alive
  // (debug builds only; check_class asserts loader liveness).
  DEBUG_ONLY(metadata_do(check_class));
}
545
// Called to clean up after class unloading for live nmethods and from the sweeper
// for all methods.
// 'unloading_occurred' additionally clears ICs whose cached metadata belongs to
// an unloaded loader; 'clean_all' forces cleaning of calls even to in-use code.
void CompiledMethod::cleanup_inline_caches_impl(bool unloading_occurred, bool clean_all) {
  assert(CompiledICLocker::is_safe(this), "mt unsafe call");
  ResourceMark rm;

  // Find all calls in an nmethod and clear the ones that point to non-entrant,
  // zombie and unloaded nmethods.
  RelocIterator iter(this, oops_reloc_begin());
  while(iter.next()) {

    switch (iter.type()) {

    case relocInfo::virtual_call_type:
      if (unloading_occurred) {
        // If class unloading occurred we first clear ICs where the cached metadata
        // is referring to an unloaded klass or method.
        clean_ic_if_metadata_is_dead(CompiledIC_at(&iter));
      }

      // Then (always) clear the IC if its target nmethod is dead or stale.
      clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), this, clean_all);
      break;

    case relocInfo::opt_virtual_call_type:
      clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), this, clean_all);
      break;

    case relocInfo::static_call_type:
      clean_if_nmethod_is_unloaded(compiledStaticCall_at(iter.reloc()), this, clean_all);
      break;

    case relocInfo::oop_type:
      // Oops are handled by GC, not here.
      break;

    case relocInfo::metadata_type:
      break; // nothing to do.

    default:
      break;
    }
  }
}
588
589 // Iterating over all nmethods, e.g. with the help of CodeCache::nmethods_do(fun) was found
590 // to not be inherently safe. There is a chance that fields are seen which are not properly
591 // initialized. This happens despite the fact that nmethods_do() asserts the CodeCache_lock
592 // to be held.
593 // To bundle knowledge about necessary checks in one place, this function was introduced.
594 // It is not claimed that these checks are sufficient, but they were found to be necessary.
595 bool CompiledMethod::nmethod_access_is_safe(nmethod* nm) {
596 Method* method = (nm == NULL) ? NULL : nm->method(); // nm->method() may be uninitialized, i.e. != NULL, but invalid
597 return (nm != NULL) && (method != NULL) && (method->signature() != NULL) &&
598 !nm->is_zombie() && !nm->is_not_installed() &&
599 os::is_readable_pointer(method) &&
600 os::is_readable_pointer(method->constants()) &&
601 os::is_readable_pointer(method->signature());
602 }
|
413
#ifdef ASSERT
// Debug-only helper: verify that the class loader owning the given piece of
// metadata is still alive. Aborts (ShouldNotReachHere) on unknown metadata kinds.
static void check_class(Metadata* md) {
  Klass* holder = NULL;
  if (md->is_klass()) {
    holder = (Klass*)md;
  } else if (md->is_method()) {
    holder = ((Method*)md)->method_holder();
  } else if (md->is_methodData()) {
    holder = ((MethodData*)md)->method()->method_holder();
  } else {
    // Unexpected metadata kind: dump it for diagnosis, then fail.
    md->print();
    ShouldNotReachHere();
  }
  assert(holder->is_loader_alive(), "must be alive");
}
#endif // ASSERT
431
432
433 bool CompiledMethod::clean_ic_if_metadata_is_dead(CompiledIC *ic) {
434 if (ic->is_icholder_call()) {
435 // The only exception is compiledICHolder metdata which may
436 // yet be marked below. (We check this further below).
437 CompiledICHolder* cichk_metdata = ic->cached_icholder();
438
439 if (cichk_metdata->is_loader_alive()) {
440 return true;
441 }
442 } else {
443 Metadata* ic_metdata = ic->cached_metadata();
444 if (ic_metdata != NULL) {
445 if (ic_metdata->is_klass()) {
446 if (((Klass*)ic_metdata)->is_loader_alive()) {
447 return true;
448 }
449 } else if (ic_metdata->is_method()) {
450 Method* method = (Method*)ic_metdata;
451 assert(!method->is_old(), "old method should have been cleaned");
452 if (method->method_holder()->is_loader_alive()) {
453 return true;
454 }
455 } else {
456 ShouldNotReachHere();
457 }
458 }
459 }
460
461 if (ic->is_clean()) {
462 return true;
463 }
464 return ic->set_to_clean();
465 }
466
// static_stub_Relocations may have dangling references to
// nmethods so trim them out here. Otherwise it looks like
// compiled code is maintaining a link to dead metadata.
void CompiledMethod::clean_ic_stubs() {
#ifdef ASSERT
  // NOTE: the entire body is compiled only in debug builds — in product
  // builds this function is a no-op. The cleaning here exists so that the
  // metadata-liveness asserts elsewhere (e.g. check_class) do not trip over
  // stale static stubs.
  address low_boundary = oops_reloc_begin();
  RelocIterator iter(this, low_boundary);
  while (iter.next()) {
    // First pass: find call sites that no longer go through the interpreter;
    // remember the call address so the paired static stub can be cleared.
    address static_call_addr = NULL;
    if (iter.type() == relocInfo::opt_virtual_call_type) {
      CompiledIC* cic = CompiledIC_at(&iter);
      if (!cic->is_call_to_interpreted()) {
        static_call_addr = iter.addr();
      }
    } else if (iter.type() == relocInfo::static_call_type) {
      CompiledStaticCall* csc = compiledStaticCall_at(iter.reloc());
      if (!csc->is_call_to_interpreted()) {
        static_call_addr = iter.addr();
      }
    }
    if (static_call_addr != NULL) {
      // Second pass (nested scan): locate the static stub(s) whose recorded
      // call site matches, and clear their inline-cache state. Quadratic in
      // the number of relocations, but debug-only.
      RelocIterator sciter(this, low_boundary);
      while (sciter.next()) {
        if (sciter.type() == relocInfo::static_stub_type &&
            sciter.static_stub_reloc()->static_call() == static_call_addr) {
          sciter.static_stub_reloc()->clear_inline_cache();
        }
      }
    }
  }
#endif
}
499
500 // Clean references to unloaded nmethods at addr from this one, which is not unloaded.
501 template <class CompiledICorStaticCall>
502 static bool clean_if_nmethod_is_unloaded(CompiledICorStaticCall *ic, address addr, CompiledMethod* from,
503 bool clean_all) {
504 // Ok, to lookup references to zombies here
505 CodeBlob *cb = CodeCache::find_blob_unsafe(addr);
506 CompiledMethod* nm = (cb != NULL) ? cb->as_compiled_method_or_null() : NULL;
507 if (nm != NULL) {
508 // Clean inline caches pointing to both zombie and not_entrant methods
509 if (clean_all || !nm->is_in_use() || nm->is_unloading() || (nm->method()->code() != nm)) {
510 if (!ic->set_to_clean(from->is_alive())) {
511 return false;
512 }
513 assert(ic->is_clean(), "nmethod " PTR_FORMAT "not clean %s", p2i(from), from->method()->name_and_sig_as_C_string());
514 }
515 }
516 return true;
517 }
518
519 static bool clean_if_nmethod_is_unloaded(CompiledIC *ic, CompiledMethod* from,
520 bool clean_all) {
521 return clean_if_nmethod_is_unloaded(ic, ic->ic_destination(), from, clean_all);
522 }
523
524 static bool clean_if_nmethod_is_unloaded(CompiledStaticCall *csc, CompiledMethod* from,
525 bool clean_all) {
526 return clean_if_nmethod_is_unloaded(csc, csc->destination(), from, clean_all);
527 }
528
// Cleans caches in nmethods that point to either classes that are unloaded
// or nmethods that are unloaded.
//
// Can be called either in parallel by G1 currently or after all
// nmethods are unloaded. Return postponed=true in the parallel case for
// inline caches found that point to nmethods that are not yet visited during
// the do_unloading walk.
// Returns false only when inline-cache cleaning could not complete (IC-stub
// buffer exhausted in cleanup_inline_caches_impl); the caller must refill
// stubs and retry. Returns true on full success.
bool CompiledMethod::unload_nmethod_caches(bool unloading_occurred) {
  ResourceMark rm;

  // Exception cache only needs to be called if unloading occurred
  if (unloading_occurred) {
    clean_exception_cache();
  }

  // Clear inline caches referring to dead metadata or to unloaded /
  // non-entrant target code (never postponing: clean_all = false).
  if (!cleanup_inline_caches_impl(unloading_occurred, false)) {
    return false;
  }

  // All static stubs need to be cleaned.
  clean_ic_stubs();

  // Check that the metadata embedded in the nmethod is alive
  // (debug builds only; check_class asserts loader liveness).
  DEBUG_ONLY(metadata_do(check_class));
  return true;
}
555
// Clean this nmethod's inline caches, retrying until the operation succeeds.
// cleanup_inline_caches_impl() returns false when the shared inline-cache
// stub buffer runs out; in that case the IC locker is released, the buffer
// is refilled, and the cleaning pass is re-run from the start.
void CompiledMethod::cleanup_inline_caches(bool clean_all) {
  for (;;) {
    { CompiledICLocker ic_locker(this);
      if (cleanup_inline_caches_impl(false, clean_all)) {
        return;
      }
    }
    // Refill outside the CompiledICLocker scope (released by the inner block
    // above), then retry.
    InlineCacheBuffer::refill_ic_stubs();
  }
}
566
// Called to clean up after class unloading for live nmethods and from the sweeper
// for all methods.
// 'unloading_occurred' additionally clears ICs whose cached metadata belongs to
// an unloaded loader; 'clean_all' forces cleaning of calls even to in-use code.
// Returns false as soon as any IC cleaning fails (IC-stub buffer exhausted),
// so the caller can refill the buffer and retry the whole pass.
bool CompiledMethod::cleanup_inline_caches_impl(bool unloading_occurred, bool clean_all) {
  assert(CompiledICLocker::is_safe(this), "mt unsafe call");
  ResourceMark rm;

  // Find all calls in an nmethod and clear the ones that point to non-entrant,
  // zombie and unloaded nmethods.
  RelocIterator iter(this, oops_reloc_begin());
  while(iter.next()) {

    switch (iter.type()) {

    case relocInfo::virtual_call_type:
      if (unloading_occurred) {
        // If class unloading occurred we first clear ICs where the cached metadata
        // is referring to an unloaded klass or method.
        if (!clean_ic_if_metadata_is_dead(CompiledIC_at(&iter))) {
          return false;
        }
      }

      // Then (always) clear the IC if its target nmethod is dead or stale.
      if (!clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), this, clean_all)) {
        return false;
      }
      break;

    case relocInfo::opt_virtual_call_type:
      if (!clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), this, clean_all)) {
        return false;
      }
      break;

    case relocInfo::static_call_type:
      if (!clean_if_nmethod_is_unloaded(compiledStaticCall_at(iter.reloc()), this, clean_all)) {
        return false;
      }
      break;

    default:
      break;
    }
  }

  return true;
}
613
614 // Iterating over all nmethods, e.g. with the help of CodeCache::nmethods_do(fun) was found
615 // to not be inherently safe. There is a chance that fields are seen which are not properly
616 // initialized. This happens despite the fact that nmethods_do() asserts the CodeCache_lock
617 // to be held.
618 // To bundle knowledge about necessary checks in one place, this function was introduced.
619 // It is not claimed that these checks are sufficient, but they were found to be necessary.
620 bool CompiledMethod::nmethod_access_is_safe(nmethod* nm) {
621 Method* method = (nm == NULL) ? NULL : nm->method(); // nm->method() may be uninitialized, i.e. != NULL, but invalid
622 return (nm != NULL) && (method != NULL) && (method->signature() != NULL) &&
623 !nm->is_zombie() && !nm->is_not_installed() &&
624 os::is_readable_pointer(method) &&
625 os::is_readable_pointer(method->constants()) &&
626 os::is_readable_pointer(method->signature());
627 }
|