/*
 * Copyright (c) 2015, 2018, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "code/codeCache.hpp"
#include "code/compiledIC.hpp"
#include "code/compiledMethod.inline.hpp"
#include "code/scopeDesc.hpp"
#include "interpreter/bytecode.inline.hpp"
#include "memory/resourceArea.hpp"
#include "oops/method.inline.hpp"
#include "prims/methodHandles.hpp"
#include "runtime/mutexLocker.hpp"

CompiledMethod::CompiledMethod(Method* method, const char* name, CompilerType type, const CodeBlobLayout& layout, int frame_complete_offset, int frame_size, ImmutableOopMapSet* oop_maps, bool caller_must_gc_arguments)
  : CodeBlob(name, type, layout, frame_complete_offset, frame_size, oop_maps, caller_must_gc_arguments),
  _method(method), _mark_for_deoptimization_status(not_marked) {
  init_defaults();
}

CompiledMethod::CompiledMethod(Method* method, const char* name, CompilerType type, int size, int header_size, CodeBuffer* cb, int frame_complete_offset, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments)
  : CodeBlob(name, type, CodeBlobLayout((address) this, size, header_size, cb), cb, frame_complete_offset, frame_size, oop_maps, caller_must_gc_arguments),
  _method(method), _mark_for_deoptimization_status(not_marked) {
  init_defaults();
}

void CompiledMethod::init_defaults() {
  _has_unsafe_access          = 0;
  _has_method_handle_invokes  = 0;
  _lazy_critical_native       = 0;
  _has_wide_vectors           = 0;
  _unloading_clock            = 0;
}

bool CompiledMethod::is_method_handle_return(address return_pc) {
  if (!has_method_handle_invokes())  return false;
  PcDesc* pd = pc_desc_at(return_pc);
  if (pd == NULL)
    return false;
  return pd->is_method_handle_invoke();
}

// Returns a string version of the method state.
const char* CompiledMethod::state() const {
  int state = get_state();
  switch (state) {
  case not_installed:
    return "not installed";
  case in_use:
    return "in use";
  case not_used:
    return "not_used";
  case not_entrant:
    return "not_entrant";
  case zombie:
    return "zombie";
  case unloaded:
    return "unloaded";
  default:
    fatal("unexpected method state: %d", state);
    return NULL;
  }
}

//-----------------------------------------------------------------------------

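// Writers prepend under ExceptionCache_lock; the release store below is
// presumably what lets handler_for_exception_and_pc() walk the list without
// locking: a reader that observes the new head also observes its fully
// initialized fields.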
void CompiledMethod::add_exception_cache_entry(ExceptionCache* new_entry) {
  assert(ExceptionCache_lock->owned_by_self(), "Must hold the ExceptionCache_lock");
  assert(new_entry != NULL, "Must be non null");
  assert(new_entry->next() == NULL, "Must be null");

  ExceptionCache *ec = exception_cache();
  if (ec != NULL) {
    new_entry->set_next(ec);
  }
  release_set_exception_cache(new_entry);
}

void CompiledMethod::clean_exception_cache(BoolObjectClosure* is_alive) {
  ExceptionCache* prev = NULL;
  ExceptionCache* curr = exception_cache();

  while (curr != NULL) {
    ExceptionCache* next = curr->next();

    Klass* ex_klass = curr->exception_type();
    if (ex_klass != NULL && !ex_klass->is_loader_alive(is_alive)) {
      if (prev == NULL) {
        set_exception_cache(next);
      } else {
        prev->set_next(next);
      }
      delete curr;
      // prev stays the same.
    } else {
      prev = curr;
    }

    curr = next;
  }
}
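
// Note: unlike the add/lookup paths below, the unlinking above takes no
// lock. It is reached from the do_unloading() paths further down, i.e. as
// part of GC unloading, where presumably no thread is concurrently reading
// the cache.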

// These are the public access methods for the exception cache.
address CompiledMethod::handler_for_exception_and_pc(Handle exception, address pc) {
  // We never grab a lock to read the exception cache, so we may
  // have false negatives. This is okay, as it can only happen during
  // the first few exception lookups for a given nmethod.
  ExceptionCache* ec = exception_cache();
  while (ec != NULL) {
    address ret_val;
    if ((ret_val = ec->match(exception, pc)) != NULL) {
      return ret_val;
    }
    ec = ec->next();
  }
  return NULL;
}

void CompiledMethod::add_handler_for_exception_and_pc(Handle exception, address pc, address handler) {
  // There are potential race conditions during exception cache updates, so we
  // must own the ExceptionCache_lock before doing ANY modifications. Because
  // we don't lock during reads, it is possible to have several threads attempt
  // to update the cache with the same data. We need to check for already inserted
  // copies of the current data before adding it.

  MutexLocker ml(ExceptionCache_lock);
  ExceptionCache* target_entry = exception_cache_entry_for_exception(exception);

  if (target_entry == NULL || !target_entry->add_address_and_handler(pc, handler)) {
    target_entry = new ExceptionCache(exception, pc, handler);
    add_exception_cache_entry(target_entry);
  }
}
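
// Usage sketch (illustrative only; cm, ex and faulting_pc are hypothetical
// names, not part of this file): a dispatch path would try the lock-free
// lookup first and only compute and publish a handler on a miss:
//
//   address handler = cm->handler_for_exception_and_pc(ex, faulting_pc);
//   if (handler == NULL) {
//     handler = ...;  // slow-path computation
//     cm->add_handler_for_exception_and_pc(ex, faulting_pc, handler);
//   }
//
// Since readers never lock, two threads may race to insert the same entry;
// add_handler_for_exception_and_pc() re-checks under the lock to cope.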

//-------------end of code for ExceptionCache--------------

// These methods are private and are used to manipulate the exception cache
// directly.
ExceptionCache* CompiledMethod::exception_cache_entry_for_exception(Handle exception) {
  ExceptionCache* ec = exception_cache();
  while (ec != NULL) {
    if (ec->match_exception_with_space(exception)) {
      return ec;
    }
    ec = ec->next();
  }
  return NULL;
}

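// The [pc, pc + 1) bounds passed to the RelocIterators below restrict the
// walk to relocations attached to exactly this pc.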
bool CompiledMethod::is_at_poll_return(address pc) {
  RelocIterator iter(this, pc, pc+1);
  while (iter.next()) {
    if (iter.type() == relocInfo::poll_return_type)
      return true;
  }
  return false;
}


bool CompiledMethod::is_at_poll_or_poll_return(address pc) {
  RelocIterator iter(this, pc, pc+1);
  while (iter.next()) {
    relocInfo::relocType t = iter.type();
    if (t == relocInfo::poll_return_type || t == relocInfo::poll_type)
      return true;
  }
  return false;
}

void CompiledMethod::verify_oop_relocations() {
  // Ensure that the code matches the current oop values.
  RelocIterator iter(this, NULL, NULL);
  while (iter.next()) {
    if (iter.type() == relocInfo::oop_type) {
      oop_Relocation* reloc = iter.oop_reloc();
      if (!reloc->oop_is_immediate()) {
        reloc->verify_oop_relocation();
      }
    }
  }
}


ScopeDesc* CompiledMethod::scope_desc_at(address pc) {
  PcDesc* pd = pc_desc_at(pc);
  guarantee(pd != NULL, "scope must be present");
  return new ScopeDesc(this, pd->scope_decode_offset(),
                       pd->obj_decode_offset(), pd->should_reexecute(), pd->rethrow_exception(),
                       pd->return_oop());
}

ScopeDesc* CompiledMethod::scope_desc_near(address pc) {
  PcDesc* pd = pc_desc_near(pc);
  guarantee(pd != NULL, "scope must be present");
  return new ScopeDesc(this, pd->scope_decode_offset(),
                       pd->obj_decode_offset(), pd->should_reexecute(), pd->rethrow_exception(),
                       pd->return_oop());
}

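// Clean stale entries out of this method's inline caches. Typically driven
// by the sweeper rather than by GC; compare the do_unloading() paths below,
// which clean ICs as part of class unloading.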
void CompiledMethod::cleanup_inline_caches(bool clean_all/*=false*/) {
  assert_locked_or_safepoint(CompiledIC_lock);

  // If the method is not entrant or zombie then a JMP is plastered over the
  // first few bytes.  If an oop in the old code was there, that oop
  // should not get GC'd.  Skip the first few bytes of oops on
  // not-entrant methods.
  address low_boundary = verified_entry_point();
  if (!is_in_use() && is_nmethod()) {
    low_boundary += NativeJump::instruction_size;
    // %%% Note:  On SPARC we patch only a 4-byte trap, not a full NativeJump.
    // This means that the low_boundary is going to be a little too high.
    // This shouldn't matter, since oops of non-entrant methods are never used.
    // In fact, why are we bothering to look at oops in a non-entrant method??
  }

  // Find all calls in an nmethod and clear the ones that point to non-entrant,
  // zombie and unloaded nmethods.
  ResourceMark rm;
  RelocIterator iter(this, low_boundary);
  while (iter.next()) {
    switch (iter.type()) {
      case relocInfo::virtual_call_type:
      case relocInfo::opt_virtual_call_type: {
        CompiledIC *ic = CompiledIC_at(&iter);
        // OK to look up references to zombies here.
        CodeBlob *cb = CodeCache::find_blob_unsafe(ic->ic_destination());
        if (cb != NULL && cb->is_compiled()) {
          CompiledMethod* nm = cb->as_compiled_method();
          // Clean inline caches pointing to zombie, non-entrant and unloaded methods.
          if (clean_all || !nm->is_in_use() || (nm->method()->code() != nm)) {
            ic->set_to_clean(is_alive());
          }
        }
        break;
      }
      case relocInfo::static_call_type: {
        CompiledStaticCall *csc = compiledStaticCall_at(iter.reloc());
        CodeBlob *cb = CodeCache::find_blob_unsafe(csc->destination());
        if (cb != NULL && cb->is_compiled()) {
          CompiledMethod* cm = cb->as_compiled_method();
          // Clean inline caches pointing to zombie, non-entrant and unloaded methods.
          if (clean_all || !cm->is_in_use() || (cm->method()->code() != cm)) {
            csc->set_to_clean();
          }
        }
        break;
      }
      default:
        break;
    }
  }
}


int CompiledMethod::verify_icholder_relocations() {
  ResourceMark rm;
  int count = 0;

  RelocIterator iter(this);
  while (iter.next()) {
    if (iter.type() == relocInfo::virtual_call_type) {
      if (CompiledIC::is_icholder_call_site(iter.virtual_call_reloc(), this)) {
        CompiledIC *ic = CompiledIC_at(&iter);
        if (TraceCompiledIC) {
          tty->print("noticed icholder " INTPTR_FORMAT " ", p2i(ic->cached_icholder()));
          ic->print();
        }
        assert(ic->cached_icholder() != NULL, "must be non-NULL");
        count++;
      }
    }
  }

  return count;
}

// Method that knows how to preserve outgoing arguments at a call. This method
// must be called with a frame corresponding to a Java invoke.
void CompiledMethod::preserve_callee_argument_oops(frame fr, const RegisterMap *reg_map, OopClosure* f) {
  if (method() != NULL && !method()->is_native()) {
    address pc = fr.pc();
    SimpleScopeDesc ssd(this, pc);
    Bytecode_invoke call(ssd.method(), ssd.bci());
    bool has_receiver = call.has_receiver();
    bool has_appendix = call.has_appendix();
    Symbol* signature = call.signature();

    // The method attached by JIT compilers should be used, if present.
    // The bytecode can be inaccurate in that case.
    Method* callee = attached_method_before_pc(pc);
    if (callee != NULL) {
      has_receiver = !(callee->access_flags().is_static());
      has_appendix = false;
      signature = callee->signature();
    }

    fr.oops_compiled_arguments_do(signature, has_receiver, has_appendix, reg_map, f);
  }
}

Method* CompiledMethod::attached_method(address call_instr) {
  assert(code_contains(call_instr), "not part of the nmethod");
  RelocIterator iter(this, call_instr, call_instr + 1);
  while (iter.next()) {
    if (iter.addr() == call_instr) {
      switch (iter.type()) {
        case relocInfo::static_call_type:      return iter.static_call_reloc()->method_value();
        case relocInfo::opt_virtual_call_type: return iter.opt_virtual_call_reloc()->method_value();
        case relocInfo::virtual_call_type:     return iter.virtual_call_reloc()->method_value();
        default:                               break;
      }
    }
  }
  return NULL; // not found
}

Method* CompiledMethod::attached_method_before_pc(address pc) {
  if (NativeCall::is_call_before(pc)) {
    NativeCall* ncall = nativeCall_before(pc);
    return attached_method(ncall->instruction_address());
  }
  return NULL; // not a call
}
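
// The two helpers above compose as in preserve_callee_argument_oops():
// attached_method_before_pc() backs up from a return pc to the call
// instruction via nativeCall_before(), and attached_method() then scans the
// relocations at that exact address for a method_value(). A sketch of the
// pattern (names as used above):
//
//   Method* callee = attached_method_before_pc(fr.pc());
//   if (callee != NULL) {
//     // trust the attached method over the (possibly inaccurate) bytecode
//   }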

void CompiledMethod::clear_inline_caches() {
  assert(SafepointSynchronize::is_at_safepoint(), "cleaning of IC's only allowed at safepoint");
  if (is_zombie()) {
    return;
  }

  RelocIterator iter(this);
  while (iter.next()) {
    iter.reloc()->clear_inline_cache();
  }
}

// Clear the ICStubs of all compiled ICs.
void CompiledMethod::clear_ic_stubs() {
  assert_locked_or_safepoint(CompiledIC_lock);
  RelocIterator iter(this);
  while (iter.next()) {
    if (iter.type() == relocInfo::virtual_call_type) {
      CompiledIC* ic = CompiledIC_at(&iter);
      ic->clear_ic_stub();
    }
  }
}

#ifdef ASSERT

class CheckClass : AllStatic {
  static BoolObjectClosure* _is_alive;

  // Check that the class loader is alive for this bit of metadata.
  static void check_class(Metadata* md) {
    Klass* klass = NULL;
    if (md->is_klass()) {
      klass = ((Klass*)md);
    } else if (md->is_method()) {
      klass = ((Method*)md)->method_holder();
    } else if (md->is_methodData()) {
      klass = ((MethodData*)md)->method()->method_holder();
    } else {
      md->print();
      ShouldNotReachHere();
    }
    assert(klass->is_loader_alive(_is_alive), "must be alive");
  }
 public:
  static void do_check_class(BoolObjectClosure* is_alive, CompiledMethod* nm) {
    assert(SafepointSynchronize::is_at_safepoint(), "this is only ok at safepoint");
    _is_alive = is_alive;
    nm->metadata_do(check_class);
  }
};

// This is called during a safepoint, so it can use static data.
BoolObjectClosure* CheckClass::_is_alive = NULL;
#endif // ASSERT


void CompiledMethod::clean_ic_if_metadata_is_dead(CompiledIC *ic, BoolObjectClosure *is_alive) {
  if (ic->is_icholder_call()) {
    // The only exception is compiledICHolder metadata which may
    // yet be marked below. (We check this further below.)
    CompiledICHolder* cichk_oop = ic->cached_icholder();

    if (cichk_oop->is_loader_alive(is_alive)) {
      return;
    }
  } else {
    Metadata* ic_oop = ic->cached_metadata();
    if (ic_oop != NULL) {
      if (ic_oop->is_klass()) {
        if (((Klass*)ic_oop)->is_loader_alive(is_alive)) {
          return;
        }
      } else if (ic_oop->is_method()) {
        if (((Method*)ic_oop)->method_holder()->is_loader_alive(is_alive)) {
          return;
        }
      } else {
        ShouldNotReachHere();
      }
    }
  }

  ic->set_to_clean();
}

unsigned char CompiledMethod::_global_unloading_clock = 0;

void CompiledMethod::increase_unloading_clock() {
  _global_unloading_clock++;
  if (_global_unloading_clock == 0) {
    // nmethods are allocated with _unloading_clock == 0,
    // so 0 is never used as a clock value.
    _global_unloading_clock = 1;
  }
}

void CompiledMethod::set_unloading_clock(unsigned char unloading_clock) {
  OrderAccess::release_store(&_unloading_clock, unloading_clock);
}

unsigned char CompiledMethod::unloading_clock() {
  return OrderAccess::load_acquire(&_unloading_clock);
}
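
// Unloading-clock protocol, as used by clean_if_nmethod_is_unloaded() and
// do_unloading_parallel() below: the GC bumps _global_unloading_clock once
// per unloading cycle, and each nmethod is stamped with the current value
// (by the GC's parallel unloading code, outside this file) once it has been
// processed. A cleaner following an inline cache into a callee can then test
//
//   nm->unloading_clock() != CompiledMethod::global_unloading_clock()
//
// to detect a not-yet-processed callee and postpone cleaning that IC. The
// release_store/load_acquire pair above orders the stamp against the
// processing work it advertises.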

// Processing of oop references should have been sufficient to keep
// all strong references alive.  Any weak references should have been
// cleared as well.  Visit all the metadata and ensure that it's
// really alive.
void CompiledMethod::verify_metadata_loaders(address low_boundary, BoolObjectClosure* is_alive) {
#ifdef ASSERT
  RelocIterator iter(this, low_boundary);
  while (iter.next()) {
    // static_stub_Relocations may have dangling references to
    // Method*s so trim them out here.  Otherwise it looks like
    // compiled code is maintaining a link to dead metadata.
    address static_call_addr = NULL;
    if (iter.type() == relocInfo::opt_virtual_call_type) {
      CompiledIC* cic = CompiledIC_at(&iter);
      if (!cic->is_call_to_interpreted()) {
        static_call_addr = iter.addr();
      }
    } else if (iter.type() == relocInfo::static_call_type) {
      CompiledStaticCall* csc = compiledStaticCall_at(iter.reloc());
      if (!csc->is_call_to_interpreted()) {
        static_call_addr = iter.addr();
      }
    }
    if (static_call_addr != NULL) {
      RelocIterator sciter(this, low_boundary);
      while (sciter.next()) {
        if (sciter.type() == relocInfo::static_stub_type &&
            sciter.static_stub_reloc()->static_call() == static_call_addr) {
          sciter.static_stub_reloc()->clear_inline_cache();
        }
      }
    }
  }
  // Check that the metadata embedded in the nmethod is alive.
  CheckClass::do_check_class(is_alive, this);
#endif
}

// This is called at the end of the strong tracing/marking phase of a
// GC to unload an nmethod if it contains otherwise unreachable
// oops.

void CompiledMethod::do_unloading(BoolObjectClosure* is_alive, bool unloading_occurred) {
  // Make sure the nmethod is ready to receive visitors.
  assert(!is_zombie() && !is_unloaded(),
         "should not call follow on zombie or unloaded nmethod");

  // If the method is not entrant then a JMP is plastered over the
  // first few bytes.  If an oop in the old code was there, that oop
  // should not get GC'd.  Skip the first few bytes of oops on
  // not-entrant methods.
  address low_boundary = verified_entry_point();
  if (is_not_entrant()) {
    low_boundary += NativeJump::instruction_size;
    // %%% Note:  On SPARC we patch only a 4-byte trap, not a full NativeJump.
    // (See comment above.)
  }

  // The RedefineClasses() API can cause the class unloading invariant
  // to no longer be true. See jvmtiExport.hpp for details.
  // Also, leave a debugging breadcrumb in local flag.
  if (JvmtiExport::has_redefined_a_class()) {
    // The unloading_occurred flag is set before the call to
    // post_compiled_method_unload() so that the unloading
    // of this nmethod is reported.
    unloading_occurred = true;
  }

  // Exception cache
  clean_exception_cache(is_alive);

  // If class unloading occurred we first iterate over all inline caches and
  // clear ICs where the cached oop is referring to an unloaded klass or method.
  // The remaining live cached oops will be traversed in the relocInfo::oop_type
  // iteration below.
  if (unloading_occurred) {
    RelocIterator iter(this, low_boundary);
    while (iter.next()) {
      if (iter.type() == relocInfo::virtual_call_type) {
        CompiledIC *ic = CompiledIC_at(&iter);
        clean_ic_if_metadata_is_dead(ic, is_alive);
      }
    }
  }

  if (do_unloading_oops(low_boundary, is_alive, unloading_occurred)) {
    return;
  }

#if INCLUDE_JVMCI
  if (do_unloading_jvmci(is_alive, unloading_occurred)) {
    return;
  }
#endif

  // Ensure that all metadata is still alive.
  verify_metadata_loaders(low_boundary, is_alive);
}

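// Returns true if cleaning must be postponed: the call resolves to an
// nmethod that has not yet been stamped with the current unloading clock,
// so its state cannot be trusted yet. Callers fold the result into a
// postponed flag and retry via do_unloading_parallel_postponed().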
template <class CompiledICorStaticCall>
static bool clean_if_nmethod_is_unloaded(CompiledICorStaticCall *ic, address addr, BoolObjectClosure *is_alive, CompiledMethod* from) {
  // OK to look up references to zombies here.
  CodeBlob *cb = CodeCache::find_blob_unsafe(addr);
  CompiledMethod* nm = (cb != NULL) ? cb->as_compiled_method_or_null() : NULL;
  if (nm != NULL) {
    if (nm->unloading_clock() != CompiledMethod::global_unloading_clock()) {
      // The nmethod has not been processed yet.
      return true;
    }

    // Clean inline caches pointing to both zombie and not_entrant methods.
    if (!nm->is_in_use() || (nm->method()->code() != nm)) {
      ic->set_to_clean();
      assert(ic->is_clean(), "nmethod " PTR_FORMAT " not clean %s", p2i(from), from->method()->name_and_sig_as_C_string());
    }
  }

  return false;
}

static bool clean_if_nmethod_is_unloaded(CompiledIC *ic, BoolObjectClosure *is_alive, CompiledMethod* from) {
  return clean_if_nmethod_is_unloaded(ic, ic->ic_destination(), is_alive, from);
}

static bool clean_if_nmethod_is_unloaded(CompiledStaticCall *csc, BoolObjectClosure *is_alive, CompiledMethod* from) {
  return clean_if_nmethod_is_unloaded(csc, csc->destination(), is_alive, from);
}

bool CompiledMethod::do_unloading_parallel(BoolObjectClosure* is_alive, bool unloading_occurred) {
  ResourceMark rm;

  // Make sure the nmethod is ready to receive visitors.
  assert(!is_zombie() && !is_unloaded(),
         "should not call follow on zombie or unloaded nmethod");

  // If the method is not entrant then a JMP is plastered over the
  // first few bytes.  If an oop in the old code was there, that oop
  // should not get GC'd.  Skip the first few bytes of oops on
  // not-entrant methods.
  address low_boundary = verified_entry_point();
  if (is_not_entrant()) {
    low_boundary += NativeJump::instruction_size;
    // %%% Note:  On SPARC we patch only a 4-byte trap, not a full NativeJump.
    // (See comment above.)
  }

  // The RedefineClasses() API can cause the class unloading invariant
  // to no longer be true. See jvmtiExport.hpp for details.
  // Also, leave a debugging breadcrumb in local flag.
  if (JvmtiExport::has_redefined_a_class()) {
    // The unloading_occurred flag is set before the call to
    // post_compiled_method_unload() so that the unloading
    // of this nmethod is reported.
    unloading_occurred = true;
  }

  // Exception cache
  clean_exception_cache(is_alive);

  bool postponed = false;

  RelocIterator iter(this, low_boundary);
  while (iter.next()) {

    switch (iter.type()) {

    case relocInfo::virtual_call_type:
      if (unloading_occurred) {
        // If class unloading occurred we first iterate over all inline caches and
        // clear ICs where the cached oop is referring to an unloaded klass or method.
        clean_ic_if_metadata_is_dead(CompiledIC_at(&iter), is_alive);
      }

      postponed |= clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), is_alive, this);
      break;

    case relocInfo::opt_virtual_call_type:
      postponed |= clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), is_alive, this);
      break;

    case relocInfo::static_call_type:
      postponed |= clean_if_nmethod_is_unloaded(compiledStaticCall_at(iter.reloc()), is_alive, this);
      break;

    case relocInfo::oop_type:
      // handled by do_unloading_oops below
      break;

    case relocInfo::metadata_type:
      break; // nothing to do.

    default:
      break;
    }
  }

  if (do_unloading_oops(low_boundary, is_alive, unloading_occurred)) {
    return postponed;
  }

#if INCLUDE_JVMCI
  if (do_unloading_jvmci(is_alive, unloading_occurred)) {
    return postponed;
  }
#endif

  // Ensure that all metadata is still alive.
  verify_metadata_loaders(low_boundary, is_alive);

  return postponed;
}

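// Second pass of parallel unloading: by the time this runs, every nmethod
// has been visited (and clock-stamped) once, so the
// clean_if_nmethod_is_unloaded() calls below should no longer find anything
// to postpone, and their return values are deliberately ignored.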
void CompiledMethod::do_unloading_parallel_postponed(BoolObjectClosure* is_alive, bool unloading_occurred) {
  ResourceMark rm;

  // Make sure the nmethod is ready to receive visitors.
  assert(!is_zombie(),
         "should not call follow on zombie nmethod");

  // If the method is not entrant then a JMP is plastered over the
  // first few bytes.  If an oop in the old code was there, that oop
  // should not get GC'd.  Skip the first few bytes of oops on
  // not-entrant methods.
  address low_boundary = verified_entry_point();
  if (is_not_entrant()) {
    low_boundary += NativeJump::instruction_size;
    // %%% Note:  On SPARC we patch only a 4-byte trap, not a full NativeJump.
    // (See comment above.)
  }

  RelocIterator iter(this, low_boundary);
  while (iter.next()) {

    switch (iter.type()) {

    case relocInfo::virtual_call_type:
      clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), is_alive, this);
      break;

    case relocInfo::opt_virtual_call_type:
      clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), is_alive, this);
      break;

    case relocInfo::static_call_type:
      clean_if_nmethod_is_unloaded(compiledStaticCall_at(iter.reloc()), is_alive, this);
      break;

    default:
      break;
    }
  }
}