Old version:

// Check class_loader is alive for this bit of metadata.
static void check_class(Metadata* md) {
  Klass* klass = NULL;
  if (md->is_klass()) {
    klass = ((Klass*)md);
  } else if (md->is_method()) {
    klass = ((Method*)md)->method_holder();
  } else if (md->is_methodData()) {
    klass = ((MethodData*)md)->method()->method_holder();
  } else {
    md->print();
    ShouldNotReachHere();
  }
  assert(klass->is_loader_alive(), "must be alive");
}
#endif // ASSERT


void CompiledMethod::clean_ic_if_metadata_is_dead(CompiledIC *ic) {
  if (ic->is_icholder_call()) {
    // The only exception is compiledICHolder oops which may
    // yet be marked below. (We check this further below).
    CompiledICHolder* cichk_oop = ic->cached_icholder();

    if (cichk_oop->is_loader_alive()) {
      return;
    }
  } else {
    Metadata* ic_oop = ic->cached_metadata();
    if (ic_oop != NULL) {
      if (ic_oop->is_klass()) {
        if (((Klass*)ic_oop)->is_loader_alive()) {
          return;
        }
      } else if (ic_oop->is_method()) {
        if (((Method*)ic_oop)->method_holder()->is_loader_alive()) {
          return;
        }
      } else {
        ShouldNotReachHere();
      }
    }
  }

  ic->set_to_clean();
}

unsigned char CompiledMethod::_global_unloading_clock = 0;

void CompiledMethod::increase_unloading_clock() {
  _global_unloading_clock++;
  if (_global_unloading_clock == 0) {
    // _nmethods are allocated with _unloading_clock == 0,
    // so 0 is never used as a clock value.
    _global_unloading_clock = 1;
  }

[...]

// This is called at the end of the strong tracing/marking phase of a
// GC to unload an nmethod if it contains otherwise unreachable
// oops.

void CompiledMethod::do_unloading(BoolObjectClosure* is_alive, bool unloading_occurred) {
  // Make sure the oop's ready to receive visitors
  assert(!is_zombie() && !is_unloaded(),
         "should not call follow on zombie or unloaded nmethod");

  // If the method is not entrant then a JMP is plastered over the
  // first few bytes. If an oop in the old code was there, that oop
  // should not get GC'd. Skip the first few bytes of oops on
  // not-entrant methods.
  address low_boundary = verified_entry_point();
  if (is_not_entrant()) {
    low_boundary += NativeJump::instruction_size;
    // %%% Note: On SPARC we patch only a 4-byte trap, not a full NativeJump.
    // (See comment above.)
  }

  // The RedefineClasses() API can cause the class unloading invariant
  // to no longer be true. See jvmtiExport.hpp for details.
  // Also, leave a debugging breadcrumb in local flag.
  if (JvmtiExport::has_redefined_a_class()) {
    // This set of the unloading_occurred flag is done before the
    // call to post_compiled_method_unload() so that the unloading
    // of this nmethod is reported.
    unloading_occurred = true;
  }

  // Exception cache
  clean_exception_cache();

  // If class unloading occurred we first iterate over all inline caches and
  // clear ICs where the cached oop is referring to an unloaded klass or method.
  // The remaining live cached oops will be traversed in the relocInfo::oop_type
  // iteration below.
  if (unloading_occurred) {
    RelocIterator iter(this, low_boundary);
    while(iter.next()) {
      if (iter.type() == relocInfo::virtual_call_type) {
        CompiledIC *ic = CompiledIC_at(&iter);
        clean_ic_if_metadata_is_dead(ic);
      }
    }
  }

  if (do_unloading_oops(low_boundary, is_alive, unloading_occurred)) {
    return;
  }

[...]

static bool clean_if_nmethod_is_unloaded(CompiledStaticCall *csc, CompiledMethod* from) {
  return clean_if_nmethod_is_unloaded(csc, csc->destination(), from);
}

bool CompiledMethod::do_unloading_parallel(BoolObjectClosure* is_alive, bool unloading_occurred) {
  ResourceMark rm;

  // Make sure the oop's ready to receive visitors
  assert(!is_zombie() && !is_unloaded(),
         "should not call follow on zombie or unloaded nmethod");

  // If the method is not entrant then a JMP is plastered over the
  // first few bytes. If an oop in the old code was there, that oop
  // should not get GC'd. Skip the first few bytes of oops on
  // not-entrant methods.
  address low_boundary = verified_entry_point();
  if (is_not_entrant()) {
    low_boundary += NativeJump::instruction_size;
    // %%% Note: On SPARC we patch only a 4-byte trap, not a full NativeJump.
    // (See comment above.)
  }

  // The RedefineClasses() API can cause the class unloading invariant
  // to no longer be true. See jvmtiExport.hpp for details.
  // Also, leave a debugging breadcrumb in local flag.
  if (JvmtiExport::has_redefined_a_class()) {
    // This set of the unloading_occurred flag is done before the
    // call to post_compiled_method_unload() so that the unloading
    // of this nmethod is reported.
    unloading_occurred = true;
  }

  // Exception cache
  clean_exception_cache();

  bool postponed = false;

  RelocIterator iter(this, low_boundary);
  while(iter.next()) {

    switch (iter.type()) {

    case relocInfo::virtual_call_type:
      if (unloading_occurred) {
        // If class unloading occurred we first iterate over all inline caches and
        // clear ICs where the cached oop is referring to an unloaded klass or method.
        clean_ic_if_metadata_is_dead(CompiledIC_at(&iter));
      }

      postponed |= clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), this);

[...]

New version:

// Check class_loader is alive for this bit of metadata.
static void check_class(Metadata* md) {
  Klass* klass = NULL;
  if (md->is_klass()) {
    klass = ((Klass*)md);
  } else if (md->is_method()) {
    klass = ((Method*)md)->method_holder();
  } else if (md->is_methodData()) {
    klass = ((MethodData*)md)->method()->method_holder();
  } else {
    md->print();
    ShouldNotReachHere();
  }
  assert(klass->is_loader_alive(), "must be alive");
}
#endif // ASSERT


void CompiledMethod::clean_ic_if_metadata_is_dead(CompiledIC *ic) {
  if (ic->is_icholder_call()) {
    // The only exception is compiledICHolder metadata which may
    // yet be marked below. (We check this further below).
    CompiledICHolder* cichk_metadata = ic->cached_icholder();

    if (cichk_metadata->is_loader_alive()) {
      return;
    }
  } else {
    Metadata* ic_metadata = ic->cached_metadata();
    if (ic_metadata != NULL) {
      if (ic_metadata->is_klass()) {
        if (((Klass*)ic_metadata)->is_loader_alive()) {
          return;
        }
      } else if (ic_metadata->is_method()) {
        Method* method = (Method*)ic_metadata;
        assert(!method->is_old(), "old method should have been cleaned");
        if (method->method_holder()->is_loader_alive()) {
          return;
        }
      } else {
        ShouldNotReachHere();
      }
    }
  }

  ic->set_to_clean();
}

unsigned char CompiledMethod::_global_unloading_clock = 0;

void CompiledMethod::increase_unloading_clock() {
  _global_unloading_clock++;
  if (_global_unloading_clock == 0) {
    // _nmethods are allocated with _unloading_clock == 0,
    // so 0 is never used as a clock value.
    _global_unloading_clock = 1;
  }

[...]

// This is called at the end of the strong tracing/marking phase of a
// GC to unload an nmethod if it contains otherwise unreachable
// oops.

void CompiledMethod::do_unloading(BoolObjectClosure* is_alive, bool unloading_occurred) {
  // Make sure the oop's ready to receive visitors
  assert(!is_zombie() && !is_unloaded(),
         "should not call follow on zombie or unloaded nmethod");

  // If the method is not entrant then a JMP is plastered over the
  // first few bytes. If an oop in the old code was there, that oop
  // should not get GC'd. Skip the first few bytes of oops on
  // not-entrant methods.
  address low_boundary = verified_entry_point();
  if (is_not_entrant()) {
    low_boundary += NativeJump::instruction_size;
    // %%% Note: On SPARC we patch only a 4-byte trap, not a full NativeJump.
    // (See comment above.)
  }

  // Exception cache
  clean_exception_cache();

  // If class unloading occurred we first iterate over all inline caches and
  // clear ICs where the cached oop is referring to an unloaded klass or method.
  // The remaining live cached oops will be traversed in the relocInfo::oop_type
  // iteration below.
  if (unloading_occurred) {
    RelocIterator iter(this, low_boundary);
    while(iter.next()) {
      if (iter.type() == relocInfo::virtual_call_type) {
        CompiledIC *ic = CompiledIC_at(&iter);
        clean_ic_if_metadata_is_dead(ic);
      }
    }
  }

  if (do_unloading_oops(low_boundary, is_alive, unloading_occurred)) {
    return;
  }

[...]

static bool clean_if_nmethod_is_unloaded(CompiledStaticCall *csc, CompiledMethod* from) {
  return clean_if_nmethod_is_unloaded(csc, csc->destination(), from);
}

bool CompiledMethod::do_unloading_parallel(BoolObjectClosure* is_alive, bool unloading_occurred) {
  ResourceMark rm;

  // Make sure the oop's ready to receive visitors
  assert(!is_zombie() && !is_unloaded(),
         "should not call follow on zombie or unloaded nmethod");

  // If the method is not entrant then a JMP is plastered over the
  // first few bytes. If an oop in the old code was there, that oop
  // should not get GC'd. Skip the first few bytes of oops on
  // not-entrant methods.
  address low_boundary = verified_entry_point();
  if (is_not_entrant()) {
    low_boundary += NativeJump::instruction_size;
    // %%% Note: On SPARC we patch only a 4-byte trap, not a full NativeJump.
    // (See comment above.)
  }

  // Exception cache
  clean_exception_cache();

  bool postponed = false;

  RelocIterator iter(this, low_boundary);
  while(iter.next()) {

    switch (iter.type()) {

    case relocInfo::virtual_call_type:
      if (unloading_occurred) {
        // If class unloading occurred we first iterate over all inline caches and
        // clear ICs where the cached oop is referring to an unloaded klass or method.
        clean_ic_if_metadata_is_dead(CompiledIC_at(&iter));
      }

      postponed |= clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), this);

[...]
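The wrap-around guard in increase_unloading_clock() (unchanged between the two versions) is worth a second look: nmethods are allocated with _unloading_clock == 0, so the global clock must never hand out 0 as a valid value, or a freshly allocated nmethod could be mistaken for one already visited in the current unloading cycle. Below is a minimal standalone sketch of the same epoch pattern; UnloadingEpoch and its members are illustrative names, not HotSpot code.

#include <cassert>

// Hypothetical illustration of the clock pattern above -- not HotSpot code.
struct UnloadingEpoch {
  static unsigned char global_clock;  // current global epoch; never 0
  unsigned char local_clock = 0;      // 0 means "never visited", as for nmethods

  static void advance() {
    global_clock++;
    if (global_clock == 0) {          // unsigned char wrapped around
      global_clock = 1;               // skip 0 so fresh objects stay "stale"
    }
  }
  bool is_current() const { return local_clock == global_clock; }
  void mark_current()     { local_clock = global_clock; }
};

unsigned char UnloadingEpoch::global_clock = 1;

int main() {
  UnloadingEpoch e;
  assert(!e.is_current());    // zero-initialized => never current
  e.mark_current();
  assert(e.is_current());     // visited in this cycle
  UnloadingEpoch::advance();  // begin a new cycle
  assert(!e.is_current());    // stale again, without touching the object
  return 0;
}

Skipping 0 sacrifices one of the 256 possible clock values but avoids having to reset every object's local clock when the counter wraps.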