320
321 // Walk the list of methods which might contain non-perm oops.
// Applies |f| to every live nmethod on the scavenge-root list. Dead
// (zombie/unloaded) entries are skipped but left on the list; pruning
// happens elsewhere (see prune_scavenge_root_nmethods in gc_epilogue).
322 void CodeCache::scavenge_root_nmethods_do(CodeBlobClosure* f) {
323 assert_locked_or_safepoint(CodeCache_lock);  // caller holds CodeCache_lock or we are at a safepoint
324 debug_only(mark_scavenge_root_nmethods());  // debug builds: pre-mark all list entries so stray marks can be caught below
325
326 for (nmethod* cur = scavenge_root_nmethods(); cur != NULL; cur = cur->scavenge_root_link()) {
327 debug_only(cur->clear_scavenge_root_marked());  // reached via the list as expected -> clear the debug mark
328 assert(cur->scavenge_root_not_marked(), "");
329 assert(cur->on_scavenge_root_list(), "else shouldn't be on this list");
330
// An entry can die (become zombie or be unloaded) while still linked here.
331 bool is_live = (!cur->is_zombie() && !cur->is_unloaded());
332 #ifndef PRODUCT
333 if (TraceScavenge) {
334 cur->print_on(tty, is_live ? "scavenge root" : "dead scavenge root"); tty->cr();
335 }
336 #endif //PRODUCT
337 if (is_live) {
338 // Perform cur->oops_do(f), maybe just once per nmethod.
339 f->do_code_blob(cur);
340 cur->fix_oop_relocations();  // re-sync embedded oop relocations after the closure may have updated oops
341 }
342 }
343
344 // Check for stray marks.
345 debug_only(verify_perm_nmethods(NULL));  // debug builds: anything still marked was never reached via the list
346 }
347
// Push |nm| onto the front of the scavenge-root nmethod list.
// Caller must hold CodeCache_lock or be at a safepoint.
348 void CodeCache::add_scavenge_root_nmethod(nmethod* nm) {
349 assert_locked_or_safepoint(CodeCache_lock);
350 nm->set_on_scavenge_root_list();
351 nm->set_scavenge_root_link(_scavenge_root_nmethods);  // link to current head first...
352 set_scavenge_root_nmethods(nm);                       // ...then publish nm as the new head
353 print_trace("add_scavenge_root", nm);
354 }
355
356 void CodeCache::drop_scavenge_root_nmethod(nmethod* nm) {
357 assert_locked_or_safepoint(CodeCache_lock);
358 print_trace("drop_scavenge_root", nm);
359 nmethod* last = NULL;
360 nmethod* cur = scavenge_root_nmethods();
534
535
// Post-GC fixup pass over all alive code blobs: optionally cleans inline
// caches, re-syncs oop relocations, and prunes dead scavenge-root entries.
536 void CodeCache::gc_epilogue() {
537 assert_locked_or_safepoint(CodeCache_lock);
538 FOR_ALL_ALIVE_BLOBS(cb) {
539 if (cb->is_nmethod()) {
540 nmethod *nm = (nmethod*)cb;
541 assert(!nm->is_unloaded(), "Tautology");  // iteration covers alive blobs only
542 if (needs_cache_clean()) {
543 nm->cleanup_inline_caches();  // clear stale inline caches once per GC when requested
544 }
545 DEBUG_ONLY(nm->verify());
546 nm->fix_oop_relocations();  // patch code-embedded oop relocations after GC moved/updated oops
547 }
548 }
549 set_needs_cache_clean(false);  // request satisfied above
550 prune_scavenge_root_nmethods();  // unlink dead entries from the scavenge-root list
// NOTE(review): presumably pairs with nmethod::oops_do_marking_epilogue — confirm the prologue/epilogue protocol.
551 assert(!nmethod::oops_do_marking_is_active(), "oops_do_marking_prologue must be called");
552 }
553
554
555 address CodeCache::first_address() {
556 assert_locked_or_safepoint(CodeCache_lock);
557 return (address)_heap->begin();
558 }
559
560
561 address CodeCache::last_address() {
562 assert_locked_or_safepoint(CodeCache_lock);
563 return (address)_heap->end();
564 }
565
566
567 void icache_init();
568
569 void CodeCache::initialize() {
570 assert(CodeCacheSegmentSize >= (uintx)CodeEntryAlignment, "CodeCacheSegmentSize must be large enough to align entry points");
571 #ifdef COMPILER2
572 assert(CodeCacheSegmentSize >= (uintx)OptoLoopAlignment, "CodeCacheSegmentSize must be large enough to align inner loops");
573 #endif
|
320
321 // Walk the list of methods which might contain non-perm oops.
// Applies |f| to every live nmethod on the scavenge-root list. Dead
// (zombie/unloaded) entries are skipped but left on the list; pruning
// happens elsewhere (see prune_scavenge_root_nmethods in gc_epilogue).
322 void CodeCache::scavenge_root_nmethods_do(CodeBlobClosure* f) {
323 assert_locked_or_safepoint(CodeCache_lock);  // caller holds CodeCache_lock or we are at a safepoint
324 debug_only(mark_scavenge_root_nmethods());  // debug builds: pre-mark all list entries so stray marks can be caught below
325
326 for (nmethod* cur = scavenge_root_nmethods(); cur != NULL; cur = cur->scavenge_root_link()) {
327 debug_only(cur->clear_scavenge_root_marked());  // reached via the list as expected -> clear the debug mark
328 assert(cur->scavenge_root_not_marked(), "");
329 assert(cur->on_scavenge_root_list(), "else shouldn't be on this list");
330
// An entry can die (become zombie or be unloaded) while still linked here.
331 bool is_live = (!cur->is_zombie() && !cur->is_unloaded());
332 #ifndef PRODUCT
333 if (TraceScavenge) {
334 cur->print_on(tty, is_live ? "scavenge root" : "dead scavenge root"); tty->cr();
335 }
336 #endif //PRODUCT
337 if (is_live) {
338 // Perform cur->oops_do(f), maybe just once per nmethod.
339 f->do_code_blob(cur);
340 }
341 }
342
343 // Check for stray marks.
344 debug_only(verify_perm_nmethods(NULL));  // debug builds: anything still marked was never reached via the list
345 }
346
// Push |nm| onto the front of the scavenge-root nmethod list.
// Caller must hold CodeCache_lock or be at a safepoint.
347 void CodeCache::add_scavenge_root_nmethod(nmethod* nm) {
348 assert_locked_or_safepoint(CodeCache_lock);
349 nm->set_on_scavenge_root_list();
350 nm->set_scavenge_root_link(_scavenge_root_nmethods);  // link to current head first...
351 set_scavenge_root_nmethods(nm);                       // ...then publish nm as the new head
352 print_trace("add_scavenge_root", nm);
353 }
354
355 void CodeCache::drop_scavenge_root_nmethod(nmethod* nm) {
356 assert_locked_or_safepoint(CodeCache_lock);
357 print_trace("drop_scavenge_root", nm);
358 nmethod* last = NULL;
359 nmethod* cur = scavenge_root_nmethods();
533
534
// Post-GC fixup pass over all alive code blobs: optionally cleans inline
// caches, re-syncs oop relocations, and prunes dead scavenge-root entries.
535 void CodeCache::gc_epilogue() {
536 assert_locked_or_safepoint(CodeCache_lock);
537 FOR_ALL_ALIVE_BLOBS(cb) {
538 if (cb->is_nmethod()) {
539 nmethod *nm = (nmethod*)cb;
540 assert(!nm->is_unloaded(), "Tautology");  // iteration covers alive blobs only
541 if (needs_cache_clean()) {
542 nm->cleanup_inline_caches();  // clear stale inline caches once per GC when requested
543 }
544 DEBUG_ONLY(nm->verify());
545 nm->fix_oop_relocations();  // patch code-embedded oop relocations after GC moved/updated oops
546 }
547 }
548 set_needs_cache_clean(false);  // request satisfied above
549 prune_scavenge_root_nmethods();  // unlink dead entries from the scavenge-root list
// NOTE(review): presumably pairs with nmethod::oops_do_marking_epilogue — confirm the prologue/epilogue protocol.
550 assert(!nmethod::oops_do_marking_is_active(), "oops_do_marking_prologue must be called");
551 }
552
553
// Debug aid: run VerifyOopClosure over every oop embedded in each alive
// nmethod and check its oop relocation entries.
554 void CodeCache::verify_oops() {
555 MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);  // lock without safepoint check
556 VerifyOopClosure voc;
557 FOR_ALL_ALIVE_BLOBS(cb) {
558 if (cb->is_nmethod()) {
559 nmethod *nm = (nmethod*)cb;
560 nm->oops_do(&voc);  // visit every oop the nmethod references
561 nm->verify_oop_relocations();
562 }
563 }
564 }
565
566
567 address CodeCache::first_address() {
568 assert_locked_or_safepoint(CodeCache_lock);
569 return (address)_heap->begin();
570 }
571
572
573 address CodeCache::last_address() {
574 assert_locked_or_safepoint(CodeCache_lock);
575 return (address)_heap->end();
576 }
577
578
579 void icache_init();
580
581 void CodeCache::initialize() {
582 assert(CodeCacheSegmentSize >= (uintx)CodeEntryAlignment, "CodeCacheSegmentSize must be large enough to align entry points");
583 #ifdef COMPILER2
584 assert(CodeCacheSegmentSize >= (uintx)OptoLoopAlignment, "CodeCacheSegmentSize must be large enough to align inner loops");
585 #endif
|