567 int nmethod_size,
568 CodeOffsets* offsets,
569 CodeBuffer* code_buffer,
570 int frame_size,
571 ByteSize basic_lock_owner_sp_offset,
572 ByteSize basic_lock_sp_offset,
573 OopMapSet* oop_maps )
574 : CodeBlob("native nmethod", code_buffer, sizeof(nmethod),
575 nmethod_size, offsets->value(CodeOffsets::Frame_Complete), frame_size, oop_maps),
576 _compiled_synchronized_native_basic_lock_owner_sp_offset(basic_lock_owner_sp_offset),
577 _compiled_synchronized_native_basic_lock_sp_offset(basic_lock_sp_offset)
578 {
579 {
580 debug_only(No_Safepoint_Verifier nsv;)
581 assert_locked_or_safepoint(CodeCache_lock);
582
583 NOT_PRODUCT(_has_debug_info = false);
584 _oops_do_mark_link = NULL;
585 _method = method;
586 _entry_bci = InvocationEntryBci;
587 _osr_link = NULL;
588 _scavenge_root_link = NULL;
589 _scavenge_root_state = 0;
590 _saved_nmethod_link = NULL;
591 _compiler = NULL;
592 // We have no exception handler or deopt handler make the
593 // values something that will never match a pc like the nmethod vtable entry
594 _exception_offset = 0;
595 _deoptimize_offset = 0;
596 _deoptimize_mh_offset = 0;
597 _orig_pc_offset = 0;
598 #ifdef HAVE_DTRACE_H
599 _trap_offset = 0;
600 #endif // def HAVE_DTRACE_H
601 _stub_offset = data_offset();
602 _consts_offset = data_offset();
603 _oops_offset = data_offset();
604 _scopes_data_offset = _oops_offset + round_to(code_buffer->total_oop_size(), oopSize);
605 _scopes_pcs_offset = _scopes_data_offset;
606 _dependencies_offset = _scopes_pcs_offset;
660 #ifdef HAVE_DTRACE_H
661 nmethod::nmethod(
662 methodOop method,
663 int nmethod_size,
664 CodeOffsets* offsets,
665 CodeBuffer* code_buffer,
666 int frame_size)
667 : CodeBlob("dtrace nmethod", code_buffer, sizeof(nmethod),
668 nmethod_size, offsets->value(CodeOffsets::Frame_Complete), frame_size, NULL),
669 _compiled_synchronized_native_basic_lock_owner_sp_offset(in_ByteSize(-1)),
670 _compiled_synchronized_native_basic_lock_sp_offset(in_ByteSize(-1))
671 {
672 {
673 debug_only(No_Safepoint_Verifier nsv;)
674 assert_locked_or_safepoint(CodeCache_lock);
675
676 NOT_PRODUCT(_has_debug_info = false);
677 _oops_do_mark_link = NULL;
678 _method = method;
679 _entry_bci = InvocationEntryBci;
680 _osr_link = NULL;
681 _scavenge_root_link = NULL;
682 _scavenge_root_state = 0;
683 _compiler = NULL;
684 // We have no exception handler or deopt handler make the
685 // values something that will never match a pc like the nmethod vtable entry
686 _exception_offset = 0;
687 _deoptimize_offset = 0;
688 _deoptimize_mh_offset = 0;
689 _unwind_handler_offset = -1;
690 _trap_offset = offsets->value(CodeOffsets::Dtrace_trap);
691 _orig_pc_offset = 0;
692 _stub_offset = data_offset();
693 _consts_offset = data_offset();
694 _oops_offset = data_offset();
695 _scopes_data_offset = _oops_offset + round_to(code_buffer->total_oop_size(), oopSize);
696 _scopes_pcs_offset = _scopes_data_offset;
697 _dependencies_offset = _scopes_pcs_offset;
698 _handler_table_offset = _dependencies_offset;
699 _nul_chk_table_offset = _handler_table_offset;
767 int frame_size,
768 OopMapSet* oop_maps,
769 ExceptionHandlerTable* handler_table,
770 ImplicitExceptionTable* nul_chk_table,
771 AbstractCompiler* compiler,
772 int comp_level
773 )
774 : CodeBlob("nmethod", code_buffer, sizeof(nmethod),
775 nmethod_size, offsets->value(CodeOffsets::Frame_Complete), frame_size, oop_maps),
776 _compiled_synchronized_native_basic_lock_owner_sp_offset(in_ByteSize(-1)),
777 _compiled_synchronized_native_basic_lock_sp_offset(in_ByteSize(-1))
778 {
779 assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
780 {
781 debug_only(No_Safepoint_Verifier nsv;)
782 assert_locked_or_safepoint(CodeCache_lock);
783
784 NOT_PRODUCT(_has_debug_info = false);
785 _oops_do_mark_link = NULL;
786 _method = method;
787 _compile_id = compile_id;
788 _comp_level = comp_level;
789 _entry_bci = entry_bci;
790 _osr_link = NULL;
791 _scavenge_root_link = NULL;
792 _scavenge_root_state = 0;
793 _compiler = compiler;
794 _orig_pc_offset = orig_pc_offset;
795 #ifdef HAVE_DTRACE_H
796 _trap_offset = 0;
797 #endif // def HAVE_DTRACE_H
798 _stub_offset = instructions_offset() + code_buffer->total_offset_of(code_buffer->stubs()->start());
799
800 // Exception handler and deopt handler are in the stub section
801 _exception_offset = _stub_offset + offsets->value(CodeOffsets::Exceptions);
802 _deoptimize_offset = _stub_offset + offsets->value(CodeOffsets::Deopt);
803 _deoptimize_mh_offset = _stub_offset + offsets->value(CodeOffsets::DeoptMH);
804 if (offsets->value(CodeOffsets::UnwindHandler) != -1) {
805 _unwind_handler_offset = instructions_offset() + offsets->value(CodeOffsets::UnwindHandler);
806 } else {
1471 make_unloaded(is_alive, obj);
1472 return true;
1473 }
1474
1475 // ------------------------------------------------------------------
1476 // post_compiled_method_load_event
1477 // new method for install_code() path
1478 // Transfer information from compilation to jvmti
1479 void nmethod::post_compiled_method_load_event() {
1480
1481 methodOop moop = method();
1482 HS_DTRACE_PROBE8(hotspot, compiled__method__load,
1483 moop->klass_name()->bytes(),
1484 moop->klass_name()->utf8_length(),
1485 moop->name()->bytes(),
1486 moop->name()->utf8_length(),
1487 moop->signature()->bytes(),
1488 moop->signature()->utf8_length(),
1489 code_begin(), code_size());
1490
1491 if (JvmtiExport::should_post_compiled_method_load()) {
1492 JvmtiExport::post_compiled_method_load(this);
1493 }
1494 }
1495
// Report that this nmethod is being unloaded: fire the DTrace
// method-unload probe and, when a JVMTI agent has requested it, post the
// CompiledMethodUnload event.  Idempotent via the unload_reported() flag.
1496 void nmethod::post_compiled_method_unload() {
1497   if (unload_reported()) {
1498     // During unloading we transition to unloaded and then to zombie
1499     // and the unloading is reported during the first transition.
1500     return;
1501   }
1502
1503   assert(_method != NULL && !is_unloaded(), "just checking");
1504   DTRACE_METHOD_UNLOAD_PROBE(method());
1505
1506   // If a JVMTI agent has enabled the CompiledMethodUnload event then
1507   // post the event. Sometime later this nmethod will be made a zombie by
1508   // the sweeper but the methodOop will not be valid at that point.
     // NOTE(review): method()->jmethod_id() is looked up here, at unload
     // time.  The jmethodID is a weak reference to the methodOop, so if
     // the method is already being unloaded the lookup may no longer be
     // reliable -- consider caching the jmethodID at load time instead
     // (see the _jmethod_id-based variant of this function later in the
     // file, which does exactly that).
1509   if (JvmtiExport::should_post_compiled_method_unload()) {
1510     assert(!unload_reported(), "already unloaded");
1511     HandleMark hm;
1512     JvmtiExport::post_compiled_method_unload(method()->jmethod_id(), code_begin());
1513   }
1514
1515   // The JVMTI CompiledMethodUnload event can be enabled or disabled at
1516   // any time. As the nmethod is being unloaded now we mark it has
1517   // having the unload event reported - this will ensure that we don't
1518   // attempt to report the event in the unlikely scenario where the
1519   // event is enabled at the time the nmethod is made a zombie.
1520   set_unload_reported();
1521 }
1522
1523 // This is called at the end of the strong tracing/marking phase of a
1524 // GC to unload an nmethod if it contains otherwise unreachable
1525 // oops.
1526
1527 void nmethod::do_unloading(BoolObjectClosure* is_alive,
1528 OopClosure* keep_alive, bool unloading_occurred) {
1529 // Make sure the oop's ready to receive visitors
1530 assert(!is_zombie() && !is_unloaded(),
1531 "should not call follow on zombie or unloaded nmethod");
1532
|
567 int nmethod_size,
568 CodeOffsets* offsets,
569 CodeBuffer* code_buffer,
570 int frame_size,
571 ByteSize basic_lock_owner_sp_offset,
572 ByteSize basic_lock_sp_offset,
573 OopMapSet* oop_maps )
574 : CodeBlob("native nmethod", code_buffer, sizeof(nmethod),
575 nmethod_size, offsets->value(CodeOffsets::Frame_Complete), frame_size, oop_maps),
576 _compiled_synchronized_native_basic_lock_owner_sp_offset(basic_lock_owner_sp_offset),
577 _compiled_synchronized_native_basic_lock_sp_offset(basic_lock_sp_offset)
578 {
579 {
580 debug_only(No_Safepoint_Verifier nsv;)
581 assert_locked_or_safepoint(CodeCache_lock);
582
583 NOT_PRODUCT(_has_debug_info = false);
584 _oops_do_mark_link = NULL;
585 _method = method;
586 _entry_bci = InvocationEntryBci;
587 _jmethod_id = NULL;
588 _osr_link = NULL;
589 _scavenge_root_link = NULL;
590 _scavenge_root_state = 0;
591 _saved_nmethod_link = NULL;
592 _compiler = NULL;
593 // We have no exception handler or deopt handler make the
594 // values something that will never match a pc like the nmethod vtable entry
595 _exception_offset = 0;
596 _deoptimize_offset = 0;
597 _deoptimize_mh_offset = 0;
598 _orig_pc_offset = 0;
599 #ifdef HAVE_DTRACE_H
600 _trap_offset = 0;
601 #endif // def HAVE_DTRACE_H
602 _stub_offset = data_offset();
603 _consts_offset = data_offset();
604 _oops_offset = data_offset();
605 _scopes_data_offset = _oops_offset + round_to(code_buffer->total_oop_size(), oopSize);
606 _scopes_pcs_offset = _scopes_data_offset;
607 _dependencies_offset = _scopes_pcs_offset;
661 #ifdef HAVE_DTRACE_H
662 nmethod::nmethod(
663 methodOop method,
664 int nmethod_size,
665 CodeOffsets* offsets,
666 CodeBuffer* code_buffer,
667 int frame_size)
668 : CodeBlob("dtrace nmethod", code_buffer, sizeof(nmethod),
669 nmethod_size, offsets->value(CodeOffsets::Frame_Complete), frame_size, NULL),
670 _compiled_synchronized_native_basic_lock_owner_sp_offset(in_ByteSize(-1)),
671 _compiled_synchronized_native_basic_lock_sp_offset(in_ByteSize(-1))
672 {
673 {
674 debug_only(No_Safepoint_Verifier nsv;)
675 assert_locked_or_safepoint(CodeCache_lock);
676
677 NOT_PRODUCT(_has_debug_info = false);
678 _oops_do_mark_link = NULL;
679 _method = method;
680 _entry_bci = InvocationEntryBci;
681 _jmethod_id = NULL;
682 _osr_link = NULL;
683 _scavenge_root_link = NULL;
684 _scavenge_root_state = 0;
685 _compiler = NULL;
686 // We have no exception handler or deopt handler make the
687 // values something that will never match a pc like the nmethod vtable entry
688 _exception_offset = 0;
689 _deoptimize_offset = 0;
690 _deoptimize_mh_offset = 0;
691 _unwind_handler_offset = -1;
692 _trap_offset = offsets->value(CodeOffsets::Dtrace_trap);
693 _orig_pc_offset = 0;
694 _stub_offset = data_offset();
695 _consts_offset = data_offset();
696 _oops_offset = data_offset();
697 _scopes_data_offset = _oops_offset + round_to(code_buffer->total_oop_size(), oopSize);
698 _scopes_pcs_offset = _scopes_data_offset;
699 _dependencies_offset = _scopes_pcs_offset;
700 _handler_table_offset = _dependencies_offset;
701 _nul_chk_table_offset = _handler_table_offset;
769 int frame_size,
770 OopMapSet* oop_maps,
771 ExceptionHandlerTable* handler_table,
772 ImplicitExceptionTable* nul_chk_table,
773 AbstractCompiler* compiler,
774 int comp_level
775 )
776 : CodeBlob("nmethod", code_buffer, sizeof(nmethod),
777 nmethod_size, offsets->value(CodeOffsets::Frame_Complete), frame_size, oop_maps),
778 _compiled_synchronized_native_basic_lock_owner_sp_offset(in_ByteSize(-1)),
779 _compiled_synchronized_native_basic_lock_sp_offset(in_ByteSize(-1))
780 {
781 assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
782 {
783 debug_only(No_Safepoint_Verifier nsv;)
784 assert_locked_or_safepoint(CodeCache_lock);
785
786 NOT_PRODUCT(_has_debug_info = false);
787 _oops_do_mark_link = NULL;
788 _method = method;
789 _jmethod_id = NULL;
790 _compile_id = compile_id;
791 _comp_level = comp_level;
792 _entry_bci = entry_bci;
793 _osr_link = NULL;
794 _scavenge_root_link = NULL;
795 _scavenge_root_state = 0;
796 _compiler = compiler;
797 _orig_pc_offset = orig_pc_offset;
798 #ifdef HAVE_DTRACE_H
799 _trap_offset = 0;
800 #endif // def HAVE_DTRACE_H
801 _stub_offset = instructions_offset() + code_buffer->total_offset_of(code_buffer->stubs()->start());
802
803 // Exception handler and deopt handler are in the stub section
804 _exception_offset = _stub_offset + offsets->value(CodeOffsets::Exceptions);
805 _deoptimize_offset = _stub_offset + offsets->value(CodeOffsets::Deopt);
806 _deoptimize_mh_offset = _stub_offset + offsets->value(CodeOffsets::DeoptMH);
807 if (offsets->value(CodeOffsets::UnwindHandler) != -1) {
808 _unwind_handler_offset = instructions_offset() + offsets->value(CodeOffsets::UnwindHandler);
809 } else {
1474 make_unloaded(is_alive, obj);
1475 return true;
1476 }
1477
1478 // ------------------------------------------------------------------
1479 // post_compiled_method_load_event
1480 // new method for install_code() path
1481 // Transfer information from compilation to jvmti
1482 void nmethod::post_compiled_method_load_event() {
1483
1484 methodOop moop = method();
1485 HS_DTRACE_PROBE8(hotspot, compiled__method__load,
1486 moop->klass_name()->bytes(),
1487 moop->klass_name()->utf8_length(),
1488 moop->name()->bytes(),
1489 moop->name()->utf8_length(),
1490 moop->signature()->bytes(),
1491 moop->signature()->utf8_length(),
1492 code_begin(), code_size());
1493
1494 if (JvmtiExport::should_post_compiled_method_load() ||
1495 JvmtiExport::should_post_compiled_method_unload()) {
1496 get_and_cache_jmethod_id();
1497 }
1498
1499 if (JvmtiExport::should_post_compiled_method_load()) {
1500 JvmtiExport::post_compiled_method_load(this);
1501 }
1502 }
1503
1504 jmethodID nmethod::get_and_cache_jmethod_id() {
1505 if (_jmethod_id == NULL) {
1506 // Cache the jmethod_id since it can no longer be looked up once the
1507 // method itself has been marked for unloading.
1508 _jmethod_id = method()->jmethod_id();
1509 }
1510 return _jmethod_id;
1511 }
1512
// Report that this nmethod is being unloaded: fire the DTrace
// method-unload probe and, when a JVMTI agent has requested it, post the
// CompiledMethodUnload event using the jmethodID cached at load time by
// get_and_cache_jmethod_id().  Idempotent via the unload_reported() flag.
1513 void nmethod::post_compiled_method_unload() {
1514   if (unload_reported()) {
1515     // During unloading we transition to unloaded and then to zombie
1516     // and the unloading is reported during the first transition.
1517     return;
1518   }
1519
1520   assert(_method != NULL && !is_unloaded(), "just checking");
1521   DTRACE_METHOD_UNLOAD_PROBE(method());
1522
1523   // If a JVMTI agent has enabled the CompiledMethodUnload event then
1524   // post the event. Sometime later this nmethod will be made a zombie
1525   // by the sweeper but the methodOop will not be valid at that point.
1526   // If the _jmethod_id is null then no load event was ever requested
1527   // so don't bother posting the unload. The main reason for this is
1528   // that the jmethodID is a weak reference to the methodOop so if
1529   // it's being unloaded there's no way to look it up since the weak
1530   // ref will have been cleared.
1531   if (_jmethod_id != NULL && JvmtiExport::should_post_compiled_method_unload()) {
1532     assert(!unload_reported(), "already unloaded");
1533     HandleMark hm;
1534     JvmtiExport::post_compiled_method_unload(_jmethod_id, code_begin());
1535   }
1536
1537   // The JVMTI CompiledMethodUnload event can be enabled or disabled at
1538   // any time. As the nmethod is being unloaded now we mark it has
1539   // having the unload event reported - this will ensure that we don't
1540   // attempt to report the event in the unlikely scenario where the
1541   // event is enabled at the time the nmethod is made a zombie.
1542   set_unload_reported();
1543 }
1544
1545 // This is called at the end of the strong tracing/marking phase of a
1546 // GC to unload an nmethod if it contains otherwise unreachable
1547 // oops.
1548
1549 void nmethod::do_unloading(BoolObjectClosure* is_alive,
1550 OopClosure* keep_alive, bool unloading_occurred) {
1551 // Make sure the oop's ready to receive visitors
1552 assert(!is_zombie() && !is_unloaded(),
1553 "should not call follow on zombie or unloaded nmethod");
1554
|