/*
 * Copyright (c) 2015, 2017, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "code/compiledIC.hpp"
#include "code/compiledMethod.inline.hpp"
#include "code/scopeDesc.hpp"
#include "code/codeCache.hpp"
#include "prims/methodHandles.hpp"
#include "interpreter/bytecode.hpp"
#include "memory/resourceArea.hpp"
#include "runtime/mutexLocker.hpp"

CompiledMethod::CompiledMethod(Method* method, const char* name, CompilerType type, const CodeBlobLayout& layout, int frame_complete_offset, int frame_size, ImmutableOopMapSet* oop_maps, bool caller_must_gc_arguments)
  : CodeBlob(name, type, layout, frame_complete_offset, frame_size, oop_maps, caller_must_gc_arguments),
  _method(method), _mark_for_deoptimization_status(not_marked) {
  init_defaults();
}

CompiledMethod::CompiledMethod(Method* method, const char* name, CompilerType type, int size, int header_size, CodeBuffer* cb, int frame_complete_offset, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments)
  : CodeBlob(name, type, CodeBlobLayout((address) this, size, header_size, cb), cb, frame_complete_offset, frame_size, oop_maps, caller_must_gc_arguments),
  _method(method), _mark_for_deoptimization_status(not_marked) {
  init_defaults();
}

void CompiledMethod::init_defaults() {
  _has_unsafe_access          = 0;
  _has_method_handle_invokes  = 0;
  _lazy_critical_native       = 0;
  _has_wide_vectors           = 0;
  _unloading_clock            = 0;
}

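// Stack walking and deoptimization treat method handle call sites
// specially, so they need a quick way to tell whether a given return pc
// belongs to such a site.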
bool CompiledMethod::is_method_handle_return(address return_pc) {
  if (!has_method_handle_invokes())  return false;
  PcDesc* pd = pc_desc_at(return_pc);
  if (pd == NULL)
    return false;
  return pd->is_method_handle_invoke();
}

// Returns a string version of the method state.
const char* CompiledMethod::state() const {
  int state = get_state();
  switch (state) {
  case in_use:
    return "in use";
  case not_used:
    return "not_used";
  case not_entrant:
    return "not_entrant";
  case zombie:
    return "zombie";
  case unloaded:
    return "unloaded";
  default:
    fatal("unexpected method state: %d", state);
    return NULL;
  }
}

//-----------------------------------------------------------------------------

void CompiledMethod::add_exception_cache_entry(ExceptionCache* new_entry) {
  assert(ExceptionCache_lock->owned_by_self(), "Must hold the ExceptionCache_lock");
  assert(new_entry != NULL, "Must be non-null");
  assert(new_entry->next() == NULL, "Must be null");

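  // Insert at the head of the list: link the new entry to the current head
  // first, then publish it with a release store so that lock-free readers
  // (see handler_for_exception_and_pc) always observe a fully initialized
  // entry.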
  ExceptionCache *ec = exception_cache();
  if (ec != NULL) {
    new_entry->set_next(ec);
  }
  release_set_exception_cache(new_entry);
}

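// Unlink entries whose exception Klass's class loader is no longer alive.
// Callers run during GC class unloading, so the list can be edited without
// taking the ExceptionCache_lock.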
void CompiledMethod::clean_exception_cache(BoolObjectClosure* is_alive) {
  ExceptionCache* prev = NULL;
  ExceptionCache* curr = exception_cache();

  while (curr != NULL) {
    ExceptionCache* next = curr->next();

    Klass* ex_klass = curr->exception_type();
    if (ex_klass != NULL && !ex_klass->is_loader_alive(is_alive)) {
      if (prev == NULL) {
        set_exception_cache(next);
      } else {
        prev->set_next(next);
      }
      delete curr;
      // prev stays the same.
    } else {
      prev = curr;
    }

    curr = next;
  }
}

// These are the public access methods for the exception cache.
address CompiledMethod::handler_for_exception_and_pc(Handle exception, address pc) {
  // We never grab a lock to read the exception cache, so we may
  // have false negatives. This is okay, as it can only happen during
  // the first few exception lookups for a given nmethod.
  ExceptionCache* ec = exception_cache();
  while (ec != NULL) {
    address ret_val;
    if ((ret_val = ec->match(exception, pc)) != NULL) {
      return ret_val;
    }
    ec = ec->next();
  }
  return NULL;
}

void CompiledMethod::add_handler_for_exception_and_pc(Handle exception, address pc, address handler) {
  // There are potential race conditions during exception cache updates, so we
  // must own the ExceptionCache_lock before doing ANY modifications. Because
  // we don't lock during reads, it is possible to have several threads attempt
  // to update the cache with the same data. We need to check for already inserted
  // copies of the current data before adding it.

  MutexLocker ml(ExceptionCache_lock);
  ExceptionCache* target_entry = exception_cache_entry_for_exception(exception);

  if (target_entry == NULL || !target_entry->add_address_and_handler(pc, handler)) {
    target_entry = new ExceptionCache(exception, pc, handler);
    add_exception_cache_entry(target_entry);
  }
}

//-------------end of code for ExceptionCache--------------

// These methods are private, and are used to manipulate the exception cache
// directly.
ExceptionCache* CompiledMethod::exception_cache_entry_for_exception(Handle exception) {
  ExceptionCache* ec = exception_cache();
  while (ec != NULL) {
    if (ec->match_exception_with_space(exception)) {
      return ec;
    }
    ec = ec->next();
  }
  return NULL;
}

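// Safepoint polls are recorded as relocations, so these helpers scan the
// relocation info covering the single instruction at 'pc' for a poll or
// poll_return entry.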
bool CompiledMethod::is_at_poll_return(address pc) {
  RelocIterator iter(this, pc, pc+1);
  while (iter.next()) {
    if (iter.type() == relocInfo::poll_return_type)
      return true;
  }
  return false;
}


bool CompiledMethod::is_at_poll_or_poll_return(address pc) {
  RelocIterator iter(this, pc, pc+1);
  while (iter.next()) {
    relocInfo::relocType t = iter.type();
    if (t == relocInfo::poll_return_type || t == relocInfo::poll_type)
      return true;
  }
  return false;
}

void CompiledMethod::verify_oop_relocations() {
  // Ensure that the code matches the current oop values
  RelocIterator iter(this, NULL, NULL);
  while (iter.next()) {
    if (iter.type() == relocInfo::oop_type) {
      oop_Relocation* reloc = iter.oop_reloc();
      if (!reloc->oop_is_immediate()) {
        reloc->verify_oop_relocation();
      }
    }
  }
}


ScopeDesc* CompiledMethod::scope_desc_at(address pc) {
  PcDesc* pd = pc_desc_at(pc);
  guarantee(pd != NULL, "scope must be present");
  return new ScopeDesc(this, pd->scope_decode_offset(),
                       pd->obj_decode_offset(), pd->should_reexecute(), pd->rethrow_exception(),
                       pd->return_oop());
}

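// Like scope_desc_at, but accepts a pc that does not exactly match a
// recorded PcDesc and uses the nearest one (see pc_desc_near).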
ScopeDesc* CompiledMethod::scope_desc_near(address pc) {
  PcDesc* pd = pc_desc_near(pc);
  guarantee(pd != NULL, "scope must be present");
  return new ScopeDesc(this, pd->scope_decode_offset(),
                       pd->obj_decode_offset(), pd->should_reexecute(), pd->rethrow_exception(),
                       pd->return_oop());
}

void CompiledMethod::cleanup_inline_caches(bool clean_all/*=false*/) {
  assert_locked_or_safepoint(CompiledIC_lock);

  // If the method is not entrant or zombie then a JMP is plastered over the
  // first few bytes.  If an oop in the old code was there, that oop
  // should not get GC'd.  Skip the first few bytes of oops on
  // not-entrant methods.
  address low_boundary = verified_entry_point();
  if (!is_in_use() && is_nmethod()) {
    low_boundary += NativeJump::instruction_size;
    // %%% Note:  On SPARC we patch only a 4-byte trap, not a full NativeJump.
    // This means that the low_boundary is going to be a little too high.
    // This shouldn't matter, since oops of non-entrant methods are never used.
    // In fact, why are we bothering to look at oops in a non-entrant method??
  }

  // Find all calls in an nmethod and clear the ones that point to non-entrant,
  // zombie and unloaded nmethods.
  ResourceMark rm;
  RelocIterator iter(this, low_boundary);
  while (iter.next()) {
    switch (iter.type()) {
      case relocInfo::virtual_call_type:
      case relocInfo::opt_virtual_call_type: {
        CompiledIC *ic = CompiledIC_at(&iter);
        // It is OK to look up references to zombies here.
        CodeBlob *cb = CodeCache::find_blob_unsafe(ic->ic_destination());
        if (cb != NULL && cb->is_compiled()) {
          CompiledMethod* nm = cb->as_compiled_method();
          // Clean inline caches pointing to zombie, non-entrant and unloaded methods
          if (clean_all || !nm->is_in_use() || (nm->method()->code() != nm)) ic->set_to_clean(is_alive());
        }
        break;
      }
      case relocInfo::static_call_type: {
        CompiledStaticCall *csc = compiledStaticCall_at(iter.reloc());
        CodeBlob *cb = CodeCache::find_blob_unsafe(csc->destination());
        if (cb != NULL && cb->is_compiled()) {
          CompiledMethod* cm = cb->as_compiled_method();
          // Clean inline caches pointing to zombie, non-entrant and unloaded methods
          if (clean_all || !cm->is_in_use() || (cm->method()->code() != cm)) {
            csc->set_to_clean();
          }
        }
        break;
      }
      default:
        break;
    }
  }
}

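// Counts (and, with -XX:+TraceCompiledIC, prints) the CompiledICHolders
// referenced from this method's virtual call sites; used to cross-check the
// global live icholder count in verification code.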
int CompiledMethod::verify_icholder_relocations() {
  ResourceMark rm;
  int count = 0;

  RelocIterator iter(this);
  while (iter.next()) {
    if (iter.type() == relocInfo::virtual_call_type) {
      if (CompiledIC::is_icholder_call_site(iter.virtual_call_reloc(), this)) {
        CompiledIC *ic = CompiledIC_at(&iter);
        if (TraceCompiledIC) {
          tty->print("noticed icholder " INTPTR_FORMAT " ", p2i(ic->cached_icholder()));
          ic->print();
        }
        assert(ic->cached_icholder() != NULL, "must be non-NULL");
        count++;
      }
    }
  }

  return count;
}

// Preserves the outgoing argument oops at a call site. Must be called with
// a frame corresponding to a Java invoke.
void CompiledMethod::preserve_callee_argument_oops(frame fr, const RegisterMap *reg_map, OopClosure* f) {
  if (method() != NULL && !method()->is_native()) {
    address pc = fr.pc();
    SimpleScopeDesc ssd(this, pc);
    Bytecode_invoke call(ssd.method(), ssd.bci());
    bool has_receiver = call.has_receiver();
    bool has_appendix = call.has_appendix();
    Symbol* signature = call.signature();

    // The method attached by JIT-compilers should be used, if present.
    // The bytecode can be inaccurate in that case.
    Method* callee = attached_method_before_pc(pc);
    if (callee != NULL) {
      has_receiver = !(callee->access_flags().is_static());
      has_appendix = false;
      signature = callee->signature();
    }

    fr.oops_compiled_arguments_do(signature, has_receiver, has_appendix, reg_map, f);
  }
}

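// Returns the Method* the compiler attached to the call site at
// 'call_instr' (used when the exact callee is known only to the JIT), or
// NULL if no method was attached.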
Method* CompiledMethod::attached_method(address call_instr) {
  assert(code_contains(call_instr), "not part of the nmethod");
  RelocIterator iter(this, call_instr, call_instr + 1);
  while (iter.next()) {
    if (iter.addr() == call_instr) {
      switch (iter.type()) {
        case relocInfo::static_call_type:      return iter.static_call_reloc()->method_value();
        case relocInfo::opt_virtual_call_type: return iter.opt_virtual_call_reloc()->method_value();
        case relocInfo::virtual_call_type:     return iter.virtual_call_reloc()->method_value();
        default:                               break;
      }
    }
  }
  return NULL; // not found
}

Method* CompiledMethod::attached_method_before_pc(address pc) {
  if (NativeCall::is_call_before(pc)) {
    NativeCall* ncall = nativeCall_before(pc);
    return attached_method(ncall->instruction_address());
  }
  return NULL; // not a call
}

void CompiledMethod::clear_inline_caches() {
  assert(SafepointSynchronize::is_at_safepoint(), "cleaning of IC's only allowed at safepoint");
  if (is_zombie()) {
    return;
  }

  RelocIterator iter(this);
  while (iter.next()) {
    iter.reloc()->clear_inline_cache();
  }
}

// Clear ICStubs of all compiled ICs
void CompiledMethod::clear_ic_stubs() {
  assert_locked_or_safepoint(CompiledIC_lock);
  RelocIterator iter(this);
  while (iter.next()) {
    if (iter.type() == relocInfo::virtual_call_type) {
      CompiledIC* ic = CompiledIC_at(&iter);
      ic->clear_ic_stub();
    }
  }
}

#ifdef ASSERT

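// Debug-only helper: walks all metadata referenced by a CompiledMethod and
// asserts that each entry's class loader is still alive.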
class CheckClass : AllStatic {
  static BoolObjectClosure* _is_alive;

  // Check that the class loader is alive for this bit of metadata.
  static void check_class(Metadata* md) {
    Klass* klass = NULL;
    if (md->is_klass()) {
      klass = ((Klass*)md);
    } else if (md->is_method()) {
      klass = ((Method*)md)->method_holder();
    } else if (md->is_methodData()) {
      klass = ((MethodData*)md)->method()->method_holder();
    } else {
      md->print();
      ShouldNotReachHere();
    }
    assert(klass->is_loader_alive(_is_alive), "must be alive");
  }
 public:
  static void do_check_class(BoolObjectClosure* is_alive, CompiledMethod* nm) {
    assert(SafepointSynchronize::is_at_safepoint(), "this is only ok at safepoint");
    _is_alive = is_alive;
    nm->metadata_do(check_class);
  }
};

// This is called during a safepoint so can use static data
BoolObjectClosure* CheckClass::_is_alive = NULL;
#endif // ASSERT

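// Clean an inline cache if the metadata it caches (a CompiledICHolder,
// Klass, or Method) belongs to a class loader that is no longer alive.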
void CompiledMethod::clean_ic_if_metadata_is_dead(CompiledIC *ic, BoolObjectClosure *is_alive) {
  if (ic->is_icholder_call()) {
    // The cached CompiledICHolder is kept only if both its holder method
    // and its holder klass are still loader-alive.
    CompiledICHolder* cichk_oop = ic->cached_icholder();

    if (cichk_oop->holder_method()->method_holder()->is_loader_alive(is_alive) &&
        cichk_oop->holder_klass()->is_loader_alive(is_alive)) {
      return;
    }
  } else {
    Metadata* ic_oop = ic->cached_metadata();
    if (ic_oop != NULL) {
      if (ic_oop->is_klass()) {
        if (((Klass*)ic_oop)->is_loader_alive(is_alive)) {
          return;
        }
      } else if (ic_oop->is_method()) {
        if (((Method*)ic_oop)->method_holder()->is_loader_alive(is_alive)) {
          return;
        }
      } else {
        ShouldNotReachHere();
      }
    }
  }

  ic->set_to_clean();
}

unsigned char CompiledMethod::_global_unloading_clock = 0;

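// The global unloading clock lets parallel GC workers detect whether a
// given nmethod has already been processed in the current unloading cycle:
// each processed nmethod is stamped with the current global clock value,
// and the stamp is read and written with acquire/release ordering.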
void CompiledMethod::increase_unloading_clock() {
  _global_unloading_clock++;
  if (_global_unloading_clock == 0) {
    // _nmethods are allocated with _unloading_clock == 0,
    // so 0 is never used as a clock value.
    _global_unloading_clock = 1;
  }
}

void CompiledMethod::set_unloading_clock(unsigned char unloading_clock) {
  OrderAccess::release_store((volatile jubyte*)&_unloading_clock, unloading_clock);
}

unsigned char CompiledMethod::unloading_clock() {
  return (unsigned char)OrderAccess::load_acquire((volatile jubyte*)&_unloading_clock);
}

// Processing of oop references should have been sufficient to keep
// all strong references alive.  Any weak references should have been
// cleared as well.  Visit all the metadata and ensure that it's
// really alive.
void CompiledMethod::verify_metadata_loaders(address low_boundary, BoolObjectClosure* is_alive) {
#ifdef ASSERT
  RelocIterator iter(this, low_boundary);
  while (iter.next()) {
    // static_stub_Relocations may have dangling references to
    // Method*s so trim them out here.  Otherwise it looks like
    // compiled code is maintaining a link to dead metadata.
    address static_call_addr = NULL;
    if (iter.type() == relocInfo::opt_virtual_call_type) {
      CompiledIC* cic = CompiledIC_at(&iter);
      if (!cic->is_call_to_interpreted()) {
        static_call_addr = iter.addr();
      }
    } else if (iter.type() == relocInfo::static_call_type) {
      CompiledStaticCall* csc = compiledStaticCall_at(iter.reloc());
      if (!csc->is_call_to_interpreted()) {
        static_call_addr = iter.addr();
      }
    }
    if (static_call_addr != NULL) {
      RelocIterator sciter(this, low_boundary);
      while (sciter.next()) {
        if (sciter.type() == relocInfo::static_stub_type &&
            sciter.static_stub_reloc()->static_call() == static_call_addr) {
          sciter.static_stub_reloc()->clear_inline_cache();
        }
      }
    }
  }
  // Check that the metadata embedded in the nmethod is alive
  CheckClass::do_check_class(is_alive, this);
#endif // ASSERT
}

// This is called at the end of the strong tracing/marking phase of a
// GC to unload an nmethod if it contains otherwise unreachable
// oops.

void CompiledMethod::do_unloading(BoolObjectClosure* is_alive, bool unloading_occurred) {
  // Make sure the oops are ready to receive visitors
  assert(!is_zombie() && !is_unloaded(),
         "should not call follow on zombie or unloaded nmethod");

  // If the method is not entrant then a JMP is plastered over the
  // first few bytes.  If an oop in the old code was there, that oop
  // should not get GC'd.  Skip the first few bytes of oops on
  // not-entrant methods.
  address low_boundary = verified_entry_point();
  if (is_not_entrant()) {
    low_boundary += NativeJump::instruction_size;
    // %%% Note:  On SPARC we patch only a 4-byte trap, not a full NativeJump.
    // (See comment above.)
  }

  // The RedefineClasses() API can cause the class unloading invariant
  // to no longer be true. See jvmtiExport.hpp for details.
  // Also, leave a debugging breadcrumb in local flag.
  if (JvmtiExport::has_redefined_a_class()) {
    // This set of the unloading_occurred flag is done before the
    // call to post_compiled_method_unload() so that the unloading
    // of this nmethod is reported.
    unloading_occurred = true;
  }

  // Exception cache
  clean_exception_cache(is_alive);

  // If class unloading occurred we first iterate over all inline caches and
  // clear ICs where the cached oop is referring to an unloaded klass or method.
  // The remaining live cached oops will be traversed in the relocInfo::oop_type
  // iteration below.
  if (unloading_occurred) {
    RelocIterator iter(this, low_boundary);
    while (iter.next()) {
      if (iter.type() == relocInfo::virtual_call_type) {
        CompiledIC *ic = CompiledIC_at(&iter);
        clean_ic_if_metadata_is_dead(ic, is_alive);
      }
    }
  }

  if (do_unloading_oops(low_boundary, is_alive, unloading_occurred)) {
    return;
  }

#if INCLUDE_JVMCI
  if (do_unloading_jvmci(is_alive, unloading_occurred)) {
    return;
  }
#endif

  // Ensure that all metadata is still alive
  verify_metadata_loaders(low_boundary, is_alive);
}

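// Shared cleaning logic for inline caches and static calls. If the call
// target is a CompiledMethod that is no longer in use, or is no longer the
// current code for its Method*, the call site is cleaned. Returns true if
// cleaning must be postponed because the target nmethod has not yet been
// stamped with the current unloading clock (a parallel worker may still be
// processing it).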
template <class CompiledICorStaticCall>
static bool clean_if_nmethod_is_unloaded(CompiledICorStaticCall *ic, address addr, BoolObjectClosure *is_alive, CompiledMethod* from) {
  // It is OK to look up references to zombies here.
  CodeBlob *cb = CodeCache::find_blob_unsafe(addr);
  CompiledMethod* nm = (cb != NULL) ? cb->as_compiled_method_or_null() : NULL;
  if (nm != NULL) {
    if (nm->unloading_clock() != CompiledMethod::global_unloading_clock()) {
      // The nmethod has not been processed yet.
      return true;
    }

    // Clean inline caches pointing to both zombie and not_entrant methods
    if (!nm->is_in_use() || (nm->method()->code() != nm)) {
      ic->set_to_clean();
      assert(ic->is_clean(), "nmethod " PTR_FORMAT " not clean %s", p2i(from), from->method()->name_and_sig_as_C_string());
    }
  }

  return false;
}

static bool clean_if_nmethod_is_unloaded(CompiledIC *ic, BoolObjectClosure *is_alive, CompiledMethod* from) {
  return clean_if_nmethod_is_unloaded(ic, ic->ic_destination(), is_alive, from);
}

static bool clean_if_nmethod_is_unloaded(CompiledStaticCall *csc, BoolObjectClosure *is_alive, CompiledMethod* from) {
  return clean_if_nmethod_is_unloaded(csc, csc->destination(), is_alive, from);
}

bool CompiledMethod::do_unloading_parallel(BoolObjectClosure* is_alive, bool unloading_occurred) {
  ResourceMark rm;

  // Make sure the oops are ready to receive visitors
  assert(!is_zombie() && !is_unloaded(),
         "should not call follow on zombie or unloaded nmethod");

  // If the method is not entrant then a JMP is plastered over the
  // first few bytes.  If an oop in the old code was there, that oop
  // should not get GC'd.  Skip the first few bytes of oops on
  // not-entrant methods.
  address low_boundary = verified_entry_point();
  if (is_not_entrant()) {
    low_boundary += NativeJump::instruction_size;
    // %%% Note:  On SPARC we patch only a 4-byte trap, not a full NativeJump.
    // (See comment above.)
  }

  // The RedefineClasses() API can cause the class unloading invariant
  // to no longer be true. See jvmtiExport.hpp for details.
  // Also, leave a debugging breadcrumb in local flag.
  if (JvmtiExport::has_redefined_a_class()) {
    // This set of the unloading_occurred flag is done before the
    // call to post_compiled_method_unload() so that the unloading
    // of this nmethod is reported.
    unloading_occurred = true;
  }

  // Exception cache
  clean_exception_cache(is_alive);

  bool postponed = false;

  RelocIterator iter(this, low_boundary);
  while (iter.next()) {

    switch (iter.type()) {

    case relocInfo::virtual_call_type:
      if (unloading_occurred) {
        // If class unloading occurred we first iterate over all inline caches and
        // clear ICs where the cached oop is referring to an unloaded klass or method.
        clean_ic_if_metadata_is_dead(CompiledIC_at(&iter), is_alive);
      }

      postponed |= clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), is_alive, this);
      break;

    case relocInfo::opt_virtual_call_type:
      postponed |= clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), is_alive, this);
      break;

    case relocInfo::static_call_type:
      postponed |= clean_if_nmethod_is_unloaded(compiledStaticCall_at(iter.reloc()), is_alive, this);
      break;

    case relocInfo::oop_type:
      // handled by do_unloading_oops below
      break;

    case relocInfo::metadata_type:
      break; // nothing to do.

    default:
      break;
    }
  }

  if (do_unloading_oops(low_boundary, is_alive, unloading_occurred)) {
    return postponed;
  }

#if INCLUDE_JVMCI
  if (do_unloading_jvmci(is_alive, unloading_occurred)) {
    return postponed;
  }
#endif

  // Ensure that all metadata is still alive
  verify_metadata_loaders(low_boundary, is_alive);

  return postponed;
}

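// Second pass of parallel unloading: re-examine call sites whose cleaning
// was postponed in do_unloading_parallel because their target nmethods had
// not yet been processed.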
void CompiledMethod::do_unloading_parallel_postponed(BoolObjectClosure* is_alive, bool unloading_occurred) {
  ResourceMark rm;

  // Make sure the oops are ready to receive visitors
  assert(!is_zombie(),
         "should not call follow on zombie nmethod");

  // If the method is not entrant then a JMP is plastered over the
  // first few bytes.  If an oop in the old code was there, that oop
  // should not get GC'd.  Skip the first few bytes of oops on
  // not-entrant methods.
  address low_boundary = verified_entry_point();
  if (is_not_entrant()) {
    low_boundary += NativeJump::instruction_size;
    // %%% Note:  On SPARC we patch only a 4-byte trap, not a full NativeJump.
    // (See comment above.)
  }

  RelocIterator iter(this, low_boundary);
  while (iter.next()) {

    switch (iter.type()) {

    case relocInfo::virtual_call_type:
      clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), is_alive, this);
      break;

    case relocInfo::opt_virtual_call_type:
      clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), is_alive, this);
      break;

    case relocInfo::static_call_type:
      clean_if_nmethod_is_unloaded(compiledStaticCall_at(iter.reloc()), is_alive, this);
      break;

    default:
      break;
    }
  }
}