    low_boundary += NativeJump::instruction_size;
    // %%% Note: On SPARC we patch only a 4-byte trap, not a full NativeJump.
    // This means that the low_boundary is going to be a little too high.
    // This shouldn't matter, since oops of non-entrant methods are never used.
    // In fact, why are we bothering to look at oops in a non-entrant method??
  }

  // Find all calls in an nmethod and clear the ones that point to zombie,
  // not-entrant, or otherwise superseded methods.
  ResourceMark rm;
  RelocIterator iter(this, low_boundary);
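  // Walk the relocation entries (starting past any patched header code) and
  // inspect every call site.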
  while (iter.next()) {
    switch (iter.type()) {
      case relocInfo::virtual_call_type:
      case relocInfo::opt_virtual_call_type: {
        CompiledIC *ic = CompiledIC_at(iter.reloc());
        // OK to look up references to zombies here.
        CodeBlob *cb = CodeCache::find_blob_unsafe(ic->ic_destination());
        if (cb != NULL && cb->is_nmethod()) {
          nmethod* nm = (nmethod*)cb;
          // Clean inline caches pointing to zombie and not_entrant methods, and
          // to nmethods that are no longer the current code for their method.
          if (!nm->is_in_use() || (nm->method()->code() != nm)) ic->set_to_clean();
        }
        break;
      }
      case relocInfo::static_call_type: {
        CompiledStaticCall *csc = compiledStaticCall_at(iter.reloc());
        CodeBlob *cb = CodeCache::find_blob_unsafe(csc->destination());
        if (cb != NULL && cb->is_nmethod()) {
          nmethod* nm = (nmethod*)cb;
          // Clean static call sites under the same conditions as inline caches.
          if (!nm->is_in_use() || (nm->method()->code() != nm)) csc->set_to_clean();
        }
        break;
      }
    }
  }
}

// This is a private interface with the sweeper.
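// It records the sweeper traversal during which this nmethod was last
// observed on a thread stack.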
void nmethod::mark_as_seen_on_stack() {
  assert(is_not_entrant(), "must be a non-entrant method");
  set_stack_traversal_mark(NMethodSweeper::traversal_count());
}

// Tell if a non-entrant method can be converted to a zombie (i.e., there are no activations on the stack).
bool nmethod::can_not_entrant_be_converted() {
  assert(is_not_entrant(), "must be a non-entrant method");
  assert(SafepointSynchronize::is_at_safepoint(), "must be called during a safepoint");

  // Since the nmethod sweeper only does partial sweeps, the sweeper's traversal
  // count can be greater than the stack traversal count before it hits the

#ifndef PRODUCT
void nmethod::check_safepoint() {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint");
}
#endif

void nmethod::flush() {
  // Note that there are no valid oops in the nmethod anymore.
  assert(is_zombie() || (is_osr_method() && is_unloaded()), "must be a zombie method");
  assert(is_marked_for_reclamation() || (is_osr_method() && is_unloaded()), "must be marked for reclamation");

  assert(!is_locked_by_vm(), "locked methods shouldn't be flushed");
  check_safepoint();

  // Completely deallocate this method.
  EventMark m("flushing nmethod " INTPTR_FORMAT " %s", this, "");
  if (PrintMethodFlushing) {
    // Report blob count, unallocated capacity and maximum code cache capacity.
    tty->print_cr("*flushing nmethod " INTPTR_FORMAT ". " UINT32_FORMAT "/" SIZE_FORMAT "/" SIZE_FORMAT,
                  this, CodeCache::nof_blobs(), CodeCache::unallocated_capacity(), CodeCache::max_capacity());
  }

  // We need to deallocate any ExceptionCache data.
  // Note that we do not need to grab the nmethod lock for this; it
  // had better be thread safe if we're disposing of it!
  ExceptionCache* ec = exception_cache();
  set_exception_cache(NULL);
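  // Walk the detached list and free each ExceptionCache entry.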
  while (ec != NULL) {
    ExceptionCache* next = ec->next();
    delete ec;
    ec = next;
  }

  if (on_scavenge_root_list()) {
    CodeCache::drop_scavenge_root_nmethod(this);
  }
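  // Release CodeBlob-level data, then return this nmethod's space to the code cache.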
  ((CodeBlob*)(this))->flush();

  CodeCache::free(this);