1 /*
   2  * Copyright (c) 1997, 2014, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "classfile/metadataOnStackMark.hpp"
  27 #include "classfile/systemDictionary.hpp"
  28 #include "code/debugInfoRec.hpp"
  29 #include "gc_interface/collectedHeap.inline.hpp"
  30 #include "interpreter/bytecodeStream.hpp"
  31 #include "interpreter/bytecodeTracer.hpp"
  32 #include "interpreter/bytecodes.hpp"
  33 #include "interpreter/interpreter.hpp"
  34 #include "interpreter/oopMapCache.hpp"
  35 #include "memory/gcLocker.hpp"
  36 #include "memory/generation.hpp"
  37 #include "memory/heapInspection.hpp"
  38 #include "memory/metadataFactory.hpp"
  39 #include "memory/oopFactory.hpp"
  40 #include "oops/constMethod.hpp"
  41 #include "oops/methodData.hpp"
  42 #include "oops/method.hpp"
  43 #include "oops/oop.inline.hpp"
  44 #include "oops/symbol.hpp"
  45 #include "prims/jvmtiExport.hpp"
  46 #include "prims/methodHandles.hpp"
  47 #include "prims/nativeLookup.hpp"
  48 #include "runtime/arguments.hpp"
  49 #include "runtime/compilationPolicy.hpp"
  50 #include "runtime/frame.inline.hpp"
  51 #include "runtime/handles.inline.hpp"
  52 #include "runtime/orderAccess.inline.hpp"
  53 #include "runtime/relocator.hpp"
  54 #include "runtime/sharedRuntime.hpp"
  55 #include "runtime/signature.hpp"
  56 #include "utilities/quickSort.hpp"
  57 #include "utilities/xmlstream.hpp"
  58 
  59 PRAGMA_FORMAT_MUTE_WARNINGS_FOR_GCC
  60 
  61 // Implementation of Method
  62 
  63 Method* Method::allocate(ClassLoaderData* loader_data,
  64                          int byte_code_size,
  65                          AccessFlags access_flags,
  66                          InlineTableSizes* sizes,
  67                          ConstMethod::MethodType method_type,
  68                          TRAPS) {
  69   assert(!access_flags.is_native() || byte_code_size == 0,
  70          "native methods should not contain byte codes");
  71   ConstMethod* cm = ConstMethod::allocate(loader_data,
  72                                           byte_code_size,
  73                                           sizes,
  74                                           method_type,
  75                                           CHECK_NULL);
  76 
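       // Native methods get two extra trailing pointer-sized slots (native_function and
       // signature_handler), which Method::size() accounts for.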
  77   int size = Method::size(access_flags.is_native());
  78 
  79   return new (loader_data, size, false, MetaspaceObj::MethodType, THREAD) Method(cm, access_flags, size);
  80 }
  81 
  82 Method::Method(ConstMethod* xconst, AccessFlags access_flags, int size) {
  83   No_Safepoint_Verifier no_safepoint;
  84   set_constMethod(xconst);
  85   set_access_flags(access_flags);
  86   set_method_size(size);
  87 #ifdef CC_INTERP
  88   set_result_index(T_VOID);
  89 #endif
  90   set_intrinsic_id(vmIntrinsics::_none);
  91   set_jfr_towrite(false);
  92   set_force_inline(false);
  93   set_hidden(false);
  94   set_dont_inline(false);
  95   set_method_data(NULL);
  96   clear_method_counters();
  97   set_vtable_index(Method::garbage_vtable_index);
  98 
  99   // Fix and bury in Method*
 100   set_interpreter_entry(NULL); // sets i2i entry and from_interpreted_entry
 101   set_adapter_entry(NULL);
 102   clear_code(); // from_c/from_i get set to c2i/i2i
 103 
 104   if (access_flags.is_native()) {
 105     clear_native_function();
 106     set_signature_handler(NULL);
 107   }
 108 
 109   NOT_PRODUCT(set_compiled_invocation_count(0);)
 110 }
 111 
 112 // Release Method*.  The nmethod will be gone when we get here because
 113 // we've walked the code cache.
 114 void Method::deallocate_contents(ClassLoaderData* loader_data) {
 115   MetadataFactory::free_metadata(loader_data, constMethod());
 116   set_constMethod(NULL);
 117   MetadataFactory::free_metadata(loader_data, method_data());
 118   set_method_data(NULL);
 119   MetadataFactory::free_metadata(loader_data, method_counters());
 120   clear_method_counters();
 121   // The nmethod will be gone when we get here.
 122   if (code() != NULL) _code = NULL;
 123 }
 124 
 125 address Method::get_i2c_entry() {
 126   assert(_adapter != NULL, "must have");
 127   return _adapter->get_i2c_entry();
 128 }
 129 
 130 address Method::get_c2i_entry() {
 131   assert(_adapter != NULL, "must have");
 132   return _adapter->get_c2i_entry();
 133 }
 134 
 135 address Method::get_c2i_unverified_entry() {
 136   assert(_adapter != NULL, "must have");
 137   return _adapter->get_c2i_unverified_entry();
 138 }
 139 
 140 char* Method::name_and_sig_as_C_string() const {
 141   return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature());
 142 }
 143 
 144 char* Method::name_and_sig_as_C_string(char* buf, int size) const {
 145   return name_and_sig_as_C_string(constants()->pool_holder(), name(), signature(), buf, size);
 146 }
 147 
 148 char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature) {
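       // Builds "<klass external name>.<method name><signature>", e.g. "java.lang.Object.wait()V",
       // in a resource-allocated buffer.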
 149   const char* klass_name = klass->external_name();
 150   int klass_name_len  = (int)strlen(klass_name);
 151   int method_name_len = method_name->utf8_length();
 152   int len             = klass_name_len + 1 + method_name_len + signature->utf8_length();
 153   char* dest          = NEW_RESOURCE_ARRAY(char, len + 1);
 154   strcpy(dest, klass_name);
 155   dest[klass_name_len] = '.';
 156   strcpy(&dest[klass_name_len + 1], method_name->as_C_string());
 157   strcpy(&dest[klass_name_len + 1 + method_name_len], signature->as_C_string());
 158   dest[len] = 0;
 159   return dest;
 160 }
 161 
 162 char* Method::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature, char* buf, int size) {
 163   Symbol* klass_name = klass->name();
 164   klass_name->as_klass_external_name(buf, size);
 165   int len = (int)strlen(buf);
 166 
 167   if (len < size - 1) {
 168     buf[len++] = '.';
 169 
 170     method_name->as_C_string(&(buf[len]), size - len);
 171     len = (int)strlen(buf);
 172 
 173     signature->as_C_string(&(buf[len]), size - len);
 174   }
 175 
 176   return buf;
 177 }
 178 
 179 int Method::fast_exception_handler_bci_for(methodHandle mh, KlassHandle ex_klass, int throw_bci, TRAPS) {
 180   // exception table holds quadruple entries of the form (beg_bci, end_bci, handler_bci, klass_index)
 181   // access exception table
 182   ExceptionTable table(mh());
 183   int length = table.length();
 184   // iterate through all entries sequentially
 185   constantPoolHandle pool(THREAD, mh->constants());
 186   for (int i = 0; i < length; i ++) {
 187     //reacquire the table in case a GC happened
 188     ExceptionTable table(mh());
 189     int beg_bci = table.start_pc(i);
 190     int end_bci = table.end_pc(i);
 191     assert(beg_bci <= end_bci, "inconsistent exception table");
 192     if (beg_bci <= throw_bci && throw_bci < end_bci) {
 193       // exception handler bci range covers throw_bci => investigate further
 194       int handler_bci = table.handler_pc(i);
 195       int klass_index = table.catch_type_index(i);
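             // A catch_type_index of 0 means the handler catches all exception types (e.g. a finally block).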
 196       if (klass_index == 0) {
 197         return handler_bci;
 198       } else if (ex_klass.is_null()) {
 199         return handler_bci;
 200       } else {
 201         // we know the exception class => get the constraint class
 202         // this may require loading of the constraint class; if verification
 203         // fails or some other exception occurs, return handler_bci
 204         Klass* k = pool->klass_at(klass_index, CHECK_(handler_bci));
 205         KlassHandle klass = KlassHandle(THREAD, k);
 206         assert(klass.not_null(), "klass not loaded");
 207         if (ex_klass->is_subtype_of(klass())) {
 208           return handler_bci;
 209         }
 210       }
 211     }
 212   }
 213 
 214   return -1;
 215 }
 216 
 217 void Method::mask_for(int bci, InterpreterOopMap* mask) {
 218 
 219   Thread* myThread    = Thread::current();
 220   methodHandle h_this(myThread, this);
 221 #ifdef ASSERT
 222   bool has_capability = myThread->is_VM_thread() ||
 223                         myThread->is_ConcurrentGC_thread() ||
 224                         myThread->is_GC_task_thread();
 225 
 226   if (!has_capability) {
 227     if (!VerifyStack && !VerifyLastFrame) {
 228       // stack verification calls this outside the VM thread
 229       warning("oopmap should only be accessed by the "
 230               "VM, GC task or CMS threads (or during debugging)");
 231       InterpreterOopMap local_mask;
 232       method_holder()->mask_for(h_this, bci, &local_mask);
 233       local_mask.print();
 234     }
 235   }
 236 #endif
 237   method_holder()->mask_for(h_this, bci, mask);
 238   return;
 239 }
 240 
 241 
 242 int Method::bci_from(address bcp) const {
 243   if (is_native() && bcp == 0) {
 244     return 0;
 245   }
 246 #ifdef ASSERT
 247   { ResourceMark rm;
 248   assert((is_native() && bcp == code_base()) || contains(bcp) || is_error_reported(),
 249          err_msg("bcp doesn't belong to this method: bcp: " INTPTR_FORMAT ", method: %s", bcp, name_and_sig_as_C_string()));
 250   }
 251 #endif
 252   return bcp - code_base();
 253 }
 254 
 255 
 256 int Method::validate_bci(int bci) const {
 257   return (bci == 0 || bci < code_size()) ? bci : -1;
 258 }
 259 
 260 // Return bci if it appears to be a valid bcp
 261 // Return -1 otherwise.
 262 // Used by profiling code, when invalid data is a possibility.
 263 // The caller is responsible for validating the Method* itself.
 264 int Method::validate_bci_from_bcp(address bcp) const {
 265   // keep bci as -1 if not a valid bci
 266   int bci = -1;
 267   if (bcp == 0 || bcp == code_base()) {
 268     // code_size() may return 0 and we allow 0 here
 269     // the method may be native
 270     bci = 0;
 271   } else if (contains(bcp)) {
 272     bci = bcp - code_base();
 273   }
 274   // Assert that if we have dodged any asserts, bci is negative.
 275   assert(bci == -1 || bci == bci_from(bcp_from(bci)), "sane bci if >=0");
 276   return bci;
 277 }
 278 
 279 address Method::bcp_from(int bci) const {
 280   assert((is_native() && bci == 0)  || (!is_native() && 0 <= bci && bci < code_size()), err_msg("illegal bci: %d", bci));
 281   address bcp = code_base() + bci;
 282   assert((is_native() && bcp == code_base()) || contains(bcp), "bcp doesn't belong to this method");
 283   return bcp;
 284 }
 285 
 286 address Method::bcp_from(address bcp) const {
 287   if (is_native() && bcp == NULL) {
 288     return code_base();
 289   } else {
 290     return bcp;
 291   }
 292 }
 293 
 294 int Method::size(bool is_native) {
 295   // If native, then include pointers for native_function and signature_handler
 296   int extra_bytes = (is_native) ? 2*sizeof(address*) : 0;
 297   int extra_words = align_size_up(extra_bytes, BytesPerWord) / BytesPerWord;
 298   return align_object_size(header_size() + extra_words);
 299 }
 300 
 301 
 302 Symbol* Method::klass_name() const {
 303   Klass* k = method_holder();
 304   assert(k->is_klass(), "must be klass");
 305   InstanceKlass* ik = (InstanceKlass*) k;
 306   return ik->name();
 307 }
 308 
 309 
 310 // Attempt to return method oop to original state.  Clear any pointers
 311 // (to objects outside the shared spaces).  We won't be able to predict
 312 // where they should point in a new JVM.  Further initialize some
 313 // entries now in order to allow them to be write protected later.
 314 
 315 void Method::remove_unshareable_info() {
 316   unlink_method();
 317 }
 318 
 319 
 320 bool Method::was_executed_more_than(int n) {
 321   // Invocation counter is reset when the Method* is compiled.
 322   // If the method has compiled code we therefore assume it has
 323   // been executed more than n times.
 324   if (is_accessor() || is_empty_method() || (code() != NULL)) {
 325     // interpreter doesn't bump invocation counter of trivial methods
 326     // compiler does not bump invocation counter of compiled methods
 327     return true;
 328   }
 329   else if ((method_counters() != NULL &&
 330             method_counters()->invocation_counter()->carry()) ||
 331            (method_data() != NULL &&
 332             method_data()->invocation_counter()->carry())) {
 333     // The carry bit is set when the counter overflows and causes
 334     // a compilation to occur.  We don't know how many times
 335     // the counter has been reset, so we simply assume it has
 336     // been executed more than n times.
 337     return true;
 338   } else {
 339     return invocation_count() > n;
 340   }
 341 }
 342 
 343 void Method::print_invocation_count() {
 344   if (is_static()) tty->print("static ");
 345   if (is_final()) tty->print("final ");
 346   if (is_synchronized()) tty->print("synchronized ");
 347   if (is_native()) tty->print("native ");
 348   tty->print("%s::", method_holder()->external_name());
 349   name()->print_symbol_on(tty);
 350   signature()->print_symbol_on(tty);
 351 
 352   if (WizardMode) {
 353     // dump the size of the byte codes
 354     tty->print(" {%d}", code_size());
 355   }
 356   tty->cr();
 357 
 358   tty->print_cr ("  interpreter_invocation_count: %8d ", interpreter_invocation_count());
 359   tty->print_cr ("  invocation_counter:           %8d ", invocation_count());
 360   tty->print_cr ("  backedge_counter:             %8d ", backedge_count());
 361 #ifndef PRODUCT
 362   if (CountCompiledCalls) {
 363     tty->print_cr ("  compiled_invocation_count: %8d ", compiled_invocation_count());
 364   }
 365 #endif
 366 }
 367 
 368 // Build a MethodData* object to hold information about this method
 369 // collected in the interpreter.
 370 void Method::build_interpreter_method_data(methodHandle method, TRAPS) {
 371   // Do not profile the method if metaspace has hit an OOM previously
 372   // allocating profiling data. Callers clear pending exception so don't
 373   // add one here.
 374   if (ClassLoaderDataGraph::has_metaspace_oom()) {
 375     return;
 376   }
 377 
 378   // Do not profile method if current thread holds the pending list lock,
 379   // which avoids deadlock for acquiring the MethodData_lock.
 380   if (InstanceRefKlass::owns_pending_list_lock((JavaThread*)THREAD)) {
 381     return;
 382   }
 383 
 384   // Grab a lock here to prevent multiple
 385   // MethodData*s from being created.
 386   MutexLocker ml(MethodData_lock, THREAD);
 387   if (method->method_data() == NULL) {
 388     ClassLoaderData* loader_data = method->method_holder()->class_loader_data();
 389     MethodData* method_data = MethodData::allocate(loader_data, method, THREAD);
 390     if (HAS_PENDING_EXCEPTION) {
 391       CompileBroker::log_metaspace_failure();
 392       ClassLoaderDataGraph::set_metaspace_oom(true);
 393       return;   // return the exception (which is cleared)
 394     }
 395 
 396     method->set_method_data(method_data);
 397     if (PrintMethodData && (Verbose || WizardMode)) {
 398       ResourceMark rm(THREAD);
 399       tty->print("build_interpreter_method_data for ");
 400       method->print_name(tty);
 401       tty->cr();
 402       // At the end of the run, the MDO, full of data, will be dumped.
 403     }
 404   }
 405 }
 406 
 407 MethodCounters* Method::build_method_counters(Method* m, TRAPS) {
 408   // Do not profile the method if metaspace has hit an OOM previously
 409   if (ClassLoaderDataGraph::has_metaspace_oom()) {
 410     return NULL;
 411   }
 412 
 413   methodHandle mh(m);
 414   ClassLoaderData* loader_data = mh->method_holder()->class_loader_data();
 415   MethodCounters* counters = MethodCounters::allocate(loader_data, THREAD);
 416   if (HAS_PENDING_EXCEPTION) {
 417     CompileBroker::log_metaspace_failure();
 418     ClassLoaderDataGraph::set_metaspace_oom(true);
 419     return NULL;   // return the exception (which is cleared)
 420   }
 421   if (!mh->init_method_counters(counters)) {
 422     MetadataFactory::free_metadata(loader_data, counters);
 423   }
 424   return mh->method_counters();
 425 }
 426 
 427 void Method::cleanup_inline_caches() {
 428   // The current system doesn't use inline caches in the interpreter
 429   // => nothing to do (keep this method around for future use)
 430 }
 431 
 432 
 433 int Method::extra_stack_words() {
 434   // not an inline function, to avoid a header dependency on Interpreter
 435   return extra_stack_entries() * Interpreter::stackElementSize;
 436 }
 437 
 438 
 439 void Method::compute_size_of_parameters(Thread *thread) {
 440   ArgumentSizeComputer asc(signature());
 441   set_size_of_parameters(asc.size() + (is_static() ? 0 : 1));
 442 }
 443 
 444 #ifdef CC_INTERP
 445 void Method::set_result_index(BasicType type)          {
 446   _result_index = Interpreter::BasicType_as_index(type);
 447 }
 448 #endif
 449 
 450 BasicType Method::result_type() const {
 451   ResultTypeFinder rtf(signature());
 452   return rtf.type();
 453 }
 454 
 455 
 456 bool Method::is_empty_method() const {
 457   return  code_size() == 1
 458       && *code_base() == Bytecodes::_return;
 459 }
 460 
 461 
 462 bool Method::is_vanilla_constructor() const {
 463   // Returns true if this method is a vanilla constructor, i.e. an "<init>" "()V" method
 464   // which only calls the superclass vanilla constructor and possibly does stores of
 465   // zero constants to local fields:
 466   //
 467   //   aload_0
 468   //   invokespecial
 469   //   indexbyte1
 470   //   indexbyte2
 471   //
 472   // followed by an (optional) sequence of:
 473   //
 474   //   aload_0
 475   //   aconst_null / iconst_0 / fconst_0 / dconst_0
 476   //   putfield
 477   //   indexbyte1
 478   //   indexbyte2
 479   //
 480   // followed by:
 481   //
 482   //   return
 483 
 484   assert(name() == vmSymbols::object_initializer_name(),    "Should only be called for default constructors");
 485   assert(signature() == vmSymbols::void_method_signature(), "Should only be called for default constructors");
 486   int size = code_size();
 487   // Check if the size matches
 488   if (size == 0 || size % 5 != 0) return false;
 489   address cb = code_base();
 490   int last = size - 1;
 491   if (cb[0] != Bytecodes::_aload_0 || cb[1] != Bytecodes::_invokespecial || cb[last] != Bytecodes::_return) {
 492     // Does not call superclass default constructor
 493     return false;
 494   }
 495   // Check optional sequence
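       // Each store is 5 bytes: aload_0 (1) + zero-constant push (1) + putfield plus its 2-byte index (3),
       // following the 4-byte aload_0/invokespecial prologue, hence the stride of 5 starting at bci 4.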
 496   for (int i = 4; i < last; i += 5) {
 497     if (cb[i] != Bytecodes::_aload_0) return false;
 498     if (!Bytecodes::is_zero_const(Bytecodes::cast(cb[i+1]))) return false;
 499     if (cb[i+2] != Bytecodes::_putfield) return false;
 500   }
 501   return true;
 502 }
 503 
 504 
 505 bool Method::compute_has_loops_flag() {
 506   BytecodeStream bcs(this);
 507   Bytecodes::Code bc;
 508 
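       // A branch whose target precedes the next bci is a backward branch, i.e. evidence of a loop.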
 509   while ((bc = bcs.next()) >= 0) {
 510     switch( bc ) {
 511       case Bytecodes::_ifeq:
 512       case Bytecodes::_ifnull:
 513       case Bytecodes::_iflt:
 514       case Bytecodes::_ifle:
 515       case Bytecodes::_ifne:
 516       case Bytecodes::_ifnonnull:
 517       case Bytecodes::_ifgt:
 518       case Bytecodes::_ifge:
 519       case Bytecodes::_if_icmpeq:
 520       case Bytecodes::_if_icmpne:
 521       case Bytecodes::_if_icmplt:
 522       case Bytecodes::_if_icmpgt:
 523       case Bytecodes::_if_icmple:
 524       case Bytecodes::_if_icmpge:
 525       case Bytecodes::_if_acmpeq:
 526       case Bytecodes::_if_acmpne:
 527       case Bytecodes::_goto:
 528       case Bytecodes::_jsr:
 529         if( bcs.dest() < bcs.next_bci() ) _access_flags.set_has_loops();
 530         break;
 531 
 532       case Bytecodes::_goto_w:
 533       case Bytecodes::_jsr_w:
 534         if( bcs.dest_w() < bcs.next_bci() ) _access_flags.set_has_loops();
 535         break;
 536     }
 537   }
 538   _access_flags.set_loops_flag_init();
 539   return _access_flags.has_loops();
 540 }
 541 
 542 bool Method::is_final_method(AccessFlags class_access_flags) const {
 543   // or "does_not_require_vtable_entry"
 544   // a default method or overpass can occur; it is not final (it reuses a vtable entry)
 545   // private methods get vtable entries for backward class compatibility.
 546   if (is_overpass() || is_default_method())  return false;
 547   return is_final() || class_access_flags.is_final();
 548 }
 549 
 550 bool Method::is_final_method() const {
 551   return is_final_method(method_holder()->access_flags());
 552 }
 553 
 554 bool Method::is_default_method() const {
 555   if (method_holder() != NULL &&
 556       method_holder()->is_interface() &&
 557       !is_abstract()) {
 558     return true;
 559   } else {
 560     return false;
 561   }
 562 }
 563 
 564 bool Method::can_be_statically_bound(AccessFlags class_access_flags) const {
 565   if (is_final_method(class_access_flags))  return true;
 566 #ifdef ASSERT
 567   ResourceMark rm;
 568   bool is_nonv = (vtable_index() == nonvirtual_vtable_index);
 569   if (class_access_flags.is_interface()) {
 570       assert(is_nonv == is_static(), err_msg("is_nonv=%s", name_and_sig_as_C_string()));
 571   }
 572 #endif
 573   assert(valid_vtable_index() || valid_itable_index(), "method must be linked before we ask this question");
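       // nonvirtual_vtable_index means the method needs no vtable entry, so every call site can bind to it statically.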
 574   return vtable_index() == nonvirtual_vtable_index;
 575 }
 576 
 577 bool Method::can_be_statically_bound() const {
 578   return can_be_statically_bound(method_holder()->access_flags());
 579 }
 580 
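     // A trivial field accessor is exactly 5 bytes of bytecode: aload_0; getfield <u2 index>; areturn/ireturn.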
 581 bool Method::is_accessor() const {
 582   if (code_size() != 5) return false;
 583   if (size_of_parameters() != 1) return false;
 584   if (java_code_at(0) != Bytecodes::_aload_0 ) return false;
 585   if (java_code_at(1) != Bytecodes::_getfield) return false;
 586   if (java_code_at(4) != Bytecodes::_areturn &&
 587       java_code_at(4) != Bytecodes::_ireturn ) return false;
 588   return true;
 589 }
 590 
 591 bool Method::is_constant_getter() const {
 592   int last_index = code_size() - 1;
 593   // Check if the first bytecode (1-3 bytes long) is a constant push
 594   // and the last bytecode is a return.
 595   return (2 <= code_size() && code_size() <= 4 &&
 596           Bytecodes::is_const(java_code_at(0)) &&
 597           Bytecodes::length_for(java_code_at(0)) == last_index &&
 598           Bytecodes::is_return(java_code_at(last_index)));
 599 }
 600 
 601 bool Method::is_initializer() const {
 602   return name() == vmSymbols::object_initializer_name() || is_static_initializer();
 603 }
 604 
 605 bool Method::has_valid_initializer_flags() const {
 606   return (is_static() ||
 607           method_holder()->major_version() < 51);
 608 }
 609 
 610 bool Method::is_static_initializer() const {
 611   // For classfiles version 51 or greater, ensure that the clinit method is
 612   // static.  Non-static methods with the name "<clinit>" are not static
 613   // initializers. (older classfiles exempted for backward compatibility)
 614   return name() == vmSymbols::class_initializer_name() &&
 615          has_valid_initializer_flags();
 616 }
 617 
 618 
 619 objArrayHandle Method::resolved_checked_exceptions_impl(Method* method, TRAPS) {
 620   int length = method->checked_exceptions_length();
 621   if (length == 0) {  // common case
 622     return objArrayHandle(THREAD, Universe::the_empty_class_klass_array());
 623   } else {
 624     methodHandle h_this(THREAD, method);
 625     objArrayOop m_oop = oopFactory::new_objArray(SystemDictionary::Class_klass(), length, CHECK_(objArrayHandle()));
 626     objArrayHandle mirrors (THREAD, m_oop);
 627     for (int i = 0; i < length; i++) {
 628       CheckedExceptionElement* table = h_this->checked_exceptions_start(); // recompute on each iteration, not gc safe
 629       Klass* k = h_this->constants()->klass_at(table[i].class_cp_index, CHECK_(objArrayHandle()));
 630       assert(k->is_subclass_of(SystemDictionary::Throwable_klass()), "invalid exception class");
 631       mirrors->obj_at_put(i, k->java_mirror());
 632     }
 633     return mirrors;
 634   }
 635 };
 636 
 637 
 638 int Method::line_number_from_bci(int bci) const {
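       // SynchronizationEntryBCI is a pseudo-bci used for the method-entry state; map it to bci 0 for the lookup.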
 639   if (bci == SynchronizationEntryBCI) bci = 0;
 640   assert(bci == 0 || (0 <= bci && bci < code_size()), "illegal bci");
 641   int best_bci  =  0;
 642   int best_line = -1;
 643 
 644   if (has_linenumber_table()) {
 645     // The line numbers are a short array of 2-tuples [start_pc, line_number].
 646     // Not necessarily sorted and not necessarily one-to-one.
 647     CompressedLineNumberReadStream stream(compressed_linenumber_table());
 648     while (stream.read_pair()) {
 649       if (stream.bci() == bci) {
 650         // perfect match
 651         return stream.line();
 652       } else {
 653         // update best_bci/line
 654         if (stream.bci() < bci && stream.bci() >= best_bci) {
 655           best_bci  = stream.bci();
 656           best_line = stream.line();
 657         }
 658       }
 659     }
 660   }
 661   return best_line;
 662 }
 663 
 664 
 665 bool Method::is_klass_loaded_by_klass_index(int klass_index) const {
 666   if( constants()->tag_at(klass_index).is_unresolved_klass() ) {
 667     Thread *thread = Thread::current();
 668     Symbol* klass_name = constants()->klass_name_at(klass_index);
 669     Handle loader(thread, method_holder()->class_loader());
 670     Handle prot  (thread, method_holder()->protection_domain());
 671     return SystemDictionary::find(klass_name, loader, prot, thread) != NULL;
 672   } else {
 673     return true;
 674   }
 675 }
 676 
 677 
 678 bool Method::is_klass_loaded(int refinfo_index, bool must_be_resolved) const {
 679   int klass_index = constants()->klass_ref_index_at(refinfo_index);
 680   if (must_be_resolved) {
 681     // Make sure klass is resolved in constantpool.
 682     if (constants()->tag_at(klass_index).is_unresolved_klass()) return false;
 683   }
 684   return is_klass_loaded_by_klass_index(klass_index);
 685 }
 686 
 687 
 688 void Method::set_native_function(address function, bool post_event_flag) {
 689   assert(function != NULL, "use clear_native_function to unregister natives");
 690   assert(!is_method_handle_intrinsic() || function == SharedRuntime::native_method_throw_unsatisfied_link_error_entry(), "");
 691   address* native_function = native_function_addr();
 692 
 693   // We can see racers trying to place the same native function into place. Once
 694   // is plenty.
 695   address current = *native_function;
 696   if (current == function) return;
 697   if (post_event_flag && JvmtiExport::should_post_native_method_bind() &&
 698       function != NULL) {
 699     // native_method_throw_unsatisfied_link_error_entry() should only
 700     // be passed when post_event_flag is false.
 701     assert(function !=
 702       SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),
 703       "post_event_flag mis-match");
 704 
 705     // post the bind event, and possibly change the bind function
 706     JvmtiExport::post_native_method_bind(this, &function);
 707   }
 708   *native_function = function;
 709   // This function can be called more than once. We must make sure that we always
 710   // use the latest registered method -> check if a stub already has been generated.
 711   // If so, we have to make it not_entrant.
 712   nmethod* nm = code(); // Put it into local variable to guard against concurrent updates
 713   if (nm != NULL) {
 714     nm->make_not_entrant();
 715   }
 716 }
 717 
 718 
 719 bool Method::has_native_function() const {
 720   if (is_method_handle_intrinsic())
 721     return false;  // special-cased in SharedRuntime::generate_native_wrapper
 722   address func = native_function();
 723   return (func != NULL && func != SharedRuntime::native_method_throw_unsatisfied_link_error_entry());
 724 }
 725 
 726 
 727 void Method::clear_native_function() {
 728   // Note: is_method_handle_intrinsic() is allowed here.
 729   set_native_function(
 730     SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),
 731     !native_bind_event_is_interesting);
 732   clear_code();
 733 }
 734 
 735 address Method::critical_native_function() {
 736   methodHandle mh(this);
 737   return NativeLookup::lookup_critical_entry(mh);
 738 }
 739 
 740 
 741 void Method::set_signature_handler(address handler) {
 742   address* signature_handler =  signature_handler_addr();
 743   *signature_handler = handler;
 744 }
 745 
 746 
 747 void Method::print_made_not_compilable(int comp_level, bool is_osr, bool report, const char* reason) {
 748   if (PrintCompilation && report) {
 749     ttyLocker ttyl;
 750     tty->print("made not %scompilable on ", is_osr ? "OSR " : "");
 751     if (comp_level == CompLevel_all) {
 752       tty->print("all levels ");
 753     } else {
 754       tty->print("levels ");
 755       for (int i = (int)CompLevel_none; i <= comp_level; i++) {
 756         tty->print("%d ", i);
 757       }
 758     }
 759     this->print_short_name(tty);
 760     int size = this->code_size();
 761     if (size > 0) {
 762       tty->print(" (%d bytes)", size);
 763     }
 764     if (reason != NULL) {
 765       tty->print("   %s", reason);
 766     }
 767     tty->cr();
 768   }
 769   if ((TraceDeoptimization || LogCompilation) && (xtty != NULL)) {
 770     ttyLocker ttyl;
 771     xtty->begin_elem("make_not_compilable thread='" UINTX_FORMAT "' osr='%d' level='%d'",
 772                      os::current_thread_id(), is_osr, comp_level);
 773     if (reason != NULL) {
 774       xtty->print(" reason=\'%s\'", reason);
 775     }
 776     xtty->method(this);
 777     xtty->stamp();
 778     xtty->end_elem();
 779   }
 780 }
 781 
 782 bool Method::is_always_compilable() const {
 783   // Generated adapters must be compiled
 784   if (is_method_handle_intrinsic() && is_synthetic()) {
 785     assert(!is_not_c1_compilable(), "sanity check");
 786     assert(!is_not_c2_compilable(), "sanity check");
 787     return true;
 788   }
 789 
 790   return false;
 791 }
 792 
 793 bool Method::is_not_compilable(int comp_level) const {
 794   if (number_of_breakpoints() > 0)
 795     return true;
 796   if (is_always_compilable())
 797     return false;
 798   if (comp_level == CompLevel_any)
 799     return is_not_c1_compilable() || is_not_c2_compilable();
 800   if (is_c1_compile(comp_level))
 801     return is_not_c1_compilable();
 802   if (is_c2_compile(comp_level))
 803     return is_not_c2_compilable();
 804   return false;
 805 }
 806 
 807 // call this when compiler finds that this method is not compilable
 808 void Method::set_not_compilable(int comp_level, bool report, const char* reason) {
 809   if (is_always_compilable()) {
 810     // Don't mark a method which should be always compilable
 811     return;
 812   }
 813   print_made_not_compilable(comp_level, /*is_osr*/ false, report, reason);
 814   if (comp_level == CompLevel_all) {
 815     set_not_c1_compilable();
 816     set_not_c2_compilable();
 817   } else {
 818     if (is_c1_compile(comp_level))
 819       set_not_c1_compilable();
 820     if (is_c2_compile(comp_level))
 821       set_not_c2_compilable();
 822   }
 823   CompilationPolicy::policy()->disable_compilation(this);
 824   assert(!CompilationPolicy::can_be_compiled(this, comp_level), "sanity check");
 825 }
 826 
 827 bool Method::is_not_osr_compilable(int comp_level) const {
 828   if (is_not_compilable(comp_level))
 829     return true;
 830   if (comp_level == CompLevel_any)
 831     return is_not_c1_osr_compilable() || is_not_c2_osr_compilable();
 832   if (is_c1_compile(comp_level))
 833     return is_not_c1_osr_compilable();
 834   if (is_c2_compile(comp_level))
 835     return is_not_c2_osr_compilable();
 836   return false;
 837 }
 838 
 839 void Method::set_not_osr_compilable(int comp_level, bool report, const char* reason) {
 840   print_made_not_compilable(comp_level, /*is_osr*/ true, report, reason);
 841   if (comp_level == CompLevel_all) {
 842     set_not_c1_osr_compilable();
 843     set_not_c2_osr_compilable();
 844   } else {
 845     if (is_c1_compile(comp_level))
 846       set_not_c1_osr_compilable();
 847     if (is_c2_compile(comp_level))
 848       set_not_c2_osr_compilable();
 849   }
 850   CompilationPolicy::policy()->disable_compilation(this);
 851   assert(!CompilationPolicy::can_be_osr_compiled(this, comp_level), "sanity check");
 852 }
 853 
 854 // Revert to using the interpreter and clear out the nmethod
 855 void Method::clear_code() {
 856 
 857   // _adapter may be NULL if c2i adapters have not been made yet.
 858   // This should only happen at allocation time.
 859   if (_adapter == NULL) {
 860     _from_compiled_entry    = NULL;
 861   } else {
 862     _from_compiled_entry    = _adapter->get_c2i_entry();
 863   }
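       // The storestore barriers order these updates so racing callers see the redirected entry points
       // before _code is cleared.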
 864   OrderAccess::storestore();
 865   _from_interpreted_entry = _i2i_entry;
 866   OrderAccess::storestore();
 867   _code = NULL;
 868 }
 869 
 870 // Called by class data sharing to remove any entry points (which are not shared)
 871 void Method::unlink_method() {
 872   _code = NULL;
 873   _i2i_entry = NULL;
 874   _from_interpreted_entry = NULL;
 875   if (is_native()) {
 876     *native_function_addr() = NULL;
 877     set_signature_handler(NULL);
 878   }
 879   NOT_PRODUCT(set_compiled_invocation_count(0);)
 880   _adapter = NULL;
 881   _from_compiled_entry = NULL;
 882 
 883   // In case of DumpSharedSpaces, _method_data should always be NULL.
 884   //
 885   // During runtime (!DumpSharedSpaces), when we are cleaning a
 886   // shared class that failed to load, this->link_method() may
 887   // have already been called (before an exception happened), so
 888   // this->_method_data may not be NULL.
 889   assert(!DumpSharedSpaces || _method_data == NULL, "unexpected method data?");
 890 
 891   set_method_data(NULL);
 892   clear_method_counters();
 893 }
 894 
 895 // Called when the method_holder is getting linked. Setup entrypoints so the method
 896 // is ready to be called from interpreter, compiler, and vtables.
 897 void Method::link_method(methodHandle h_method, TRAPS) {
 898   // If the code cache is full, we may reenter this function for the
 899   // leftover methods that weren't linked.
 900   if (_i2i_entry != NULL) return;
 901 
 902   assert(_adapter == NULL, "init'd to NULL" );
 903   assert( _code == NULL, "nothing compiled yet" );
 904 
 905   // Setup interpreter entrypoint
 906   assert(this == h_method(), "wrong h_method()" );
 907   address entry = Interpreter::entry_for_method(h_method);
 908   assert(entry != NULL, "interpreter entry must be non-null");
 909   // Sets both _i2i_entry and _from_interpreted_entry
 910   set_interpreter_entry(entry);
 911 
 912   // Don't overwrite already registered native entries.
 913   if (is_native() && !has_native_function()) {
 914     set_native_function(
 915       SharedRuntime::native_method_throw_unsatisfied_link_error_entry(),
 916       !native_bind_event_is_interesting);
 917   }
 918 
 919   // Setup compiler entrypoint.  This is made eagerly, so we do not need
 920   // special handling of vtables.  An alternative is to make adapters more
 921   // lazily by calling make_adapter() from from_compiled_entry() for the
 922   // normal calls.  For vtable calls life gets more complicated.  When a
 923   // call-site goes mega-morphic we need adapters in all methods which can be
 924   // called from the vtable.  We need adapters on such methods that get loaded
 925   // later.  Ditto for mega-morphic itable calls.  If this proves to be a
 926   // problem we'll make these lazily later.
 927   (void) make_adapters(h_method, CHECK);
 928 
 929   // ONLY USE the h_method now, as make_adapters may have blocked
 930 
 931 }
 932 
 933 address Method::make_adapters(methodHandle mh, TRAPS) {
 934   // Adapters for compiled code are made eagerly here.  They are fairly
 935   // small (generally < 100 bytes) and quick to make (and cached and shared)
 936   // so making them eagerly shouldn't be too expensive.
 937   AdapterHandlerEntry* adapter = AdapterHandlerLibrary::get_adapter(mh);
 938   if (adapter == NULL ) {
 939     THROW_MSG_NULL(vmSymbols::java_lang_VirtualMachineError(), "out of space in CodeCache for adapters");
 940   }
 941 
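       // Publish the adapter and point _from_compiled_entry at its c2i stub so compiled callers can reach the interpreter.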
 942   mh->set_adapter_entry(adapter);
 943   mh->_from_compiled_entry = adapter->get_c2i_entry();
 944   return adapter->get_c2i_entry();
 945 }
 946 
 947 void Method::restore_unshareable_info(TRAPS) {
 948   // Since restore_unshareable_info can be called more than once for a method, don't
 949   // redo any work.   If this field is restored, there is nothing to do.
 950   if (_from_compiled_entry == NULL) {
 951     // restore method's vtable by calling a virtual function
 952     restore_vtable();
 953 
 954     methodHandle mh(THREAD, this);
 955     link_method(mh, CHECK);
 956   }
 957 }
 958 
 959 
 960 // The verified_code_entry() must be called when an invoke is resolved
 961 // on this method.
 962 
 963 // It returns the compiled code entry point, after asserting not null.
 964 // This function is called after potential safepoints so that the nmethod
 965 // or adapter it points to is still live and valid.
 966 // This function must not hit a safepoint!
 967 address Method::verified_code_entry() {
 968   debug_only(No_Safepoint_Verifier nsv;)
 969   assert(_from_compiled_entry != NULL, "must be set");
 970   return _from_compiled_entry;
 971 }
 972 
 973 // Check that if an nmethod ref exists, it has a backlink to this or no backlink at all
 974 // (could be racing a deopt).
 975 // Not inline to avoid circular ref.
 976 bool Method::check_code() const {
 977   // _code may be cached in a register or local; there's a race on the value of the field.
 978   nmethod *code = (nmethod *)OrderAccess::load_ptr_acquire(&_code);
 979   return code == NULL || (code->method() == NULL) || (code->method() == (Method*)this && !code->is_osr_method());
 980 }
 981 
 982 // Install compiled code.  Instantly it can execute.
 983 void Method::set_code(methodHandle mh, nmethod *code) {
 984   assert( code, "use clear_code to remove code" );
 985   assert( mh->check_code(), "" );
 986 
 987   guarantee(mh->adapter() != NULL, "Adapter blob must already exist!");
 988 
 989   // These writes must happen in this order, because the interpreter will
 990   // directly jump to from_interpreted_entry which jumps to an i2c adapter
 991   // which jumps to _from_compiled_entry.
 992   mh->_code = code;             // Assign before allowing compiled code to exec
 993 
 994   int comp_level = code->comp_level();
 995   // In theory there could be a race here. In practice it is unlikely
 996   // and not worth worrying about.
 997   if (comp_level > mh->highest_comp_level()) {
 998     mh->set_highest_comp_level(comp_level);
 999   }
1000 
1001   OrderAccess::storestore();
1002 #ifdef SHARK
1003   mh->_from_interpreted_entry = code->insts_begin();
1004 #else //!SHARK
1005   mh->_from_compiled_entry = code->verified_entry_point();
1006   OrderAccess::storestore();
1007   // Instantly compiled code can execute.
1008   if (!mh->is_method_handle_intrinsic())
1009     mh->_from_interpreted_entry = mh->get_i2c_entry();
1010 #endif //!SHARK
1011 }
1012 
1013 
1014 bool Method::is_overridden_in(Klass* k) const {
1015   InstanceKlass* ik = InstanceKlass::cast(k);
1016 
1017   if (ik->is_interface()) return false;
1018 
1019   // If the method is declared in an interface, we skip it - except if it
1020   // is a miranda method
1021   if (method_holder()->is_interface()) {
1022     // Check that method is not a miranda method
1023     if (ik->lookup_method(name(), signature()) == NULL) {
1024       // No implementation exists - so it is a miranda method
1025       return false;
1026     }
1027     return true;
1028   }
1029 
1030   assert(ik->is_subclass_of(method_holder()), "should be subklass");
1031   assert(ik->vtable() != NULL, "vtable should exist");
1032   if (!has_vtable_index()) {
1033     return false;
1034   } else {
1035     Method* vt_m = ik->method_at_vtable(vtable_index());
1036     return vt_m != this;
1037   }
1038 }
1039 
1040 
1041 // give advice about whether this Method* should be cached or not
1042 bool Method::should_not_be_cached() const {
1043   if (is_old()) {
1044     // This method has been redefined. It is either EMCP or obsolete
1045     // and we don't want to cache it because that would pin the method
1046     // down and prevent it from being collectible if and when it
1047     // finishes executing.
1048     return true;
1049   }
1050 
1051   // caching this method should be just fine
1052   return false;
1053 }
1054 
1055 
1056 /**
1057  *  Returns true if this is one of the specially treated methods for
1058  *  security related stack walks (like Reflection.getCallerClass).
1059  */
1060 bool Method::is_ignored_by_security_stack_walk() const {
1061   if (intrinsic_id() == vmIntrinsics::_invoke) {
1062     // This is Method.invoke() -- ignore it
1063     return true;
1064   }
1065   if (method_holder()->is_subclass_of(SystemDictionary::reflect_MethodAccessorImpl_klass())) {
1066     // This is an auxiliary frame -- ignore it
1067     return true;
1068   }
1069   if (is_method_handle_intrinsic() || is_compiled_lambda_form()) {
1070     // This is an internal adapter frame for method handles -- ignore it
1071     return true;
1072   }
1073   return false;
1074 }
1075 
1076 
1077 // Constant pool structure for invoke methods:
1078 enum {
1079   _imcp_invoke_name = 1,        // utf8: 'invokeExact', etc.
1080   _imcp_invoke_signature,       // utf8: (variable Symbol*)
1081   _imcp_limit
1082 };
1083 
1084 // Test if this method is an MH adapter frame generated by Java code.
1085 // Cf. java/lang/invoke/InvokerBytecodeGenerator
1086 bool Method::is_compiled_lambda_form() const {
1087   return intrinsic_id() == vmIntrinsics::_compiledLambdaForm;
1088 }
1089 
1090 // Test if this method is an internal MH primitive method.
1091 bool Method::is_method_handle_intrinsic() const {
1092   vmIntrinsics::ID iid = intrinsic_id();
1093   return (MethodHandles::is_signature_polymorphic(iid) &&
1094           MethodHandles::is_signature_polymorphic_intrinsic(iid));
1095 }
1096 
1097 bool Method::has_member_arg() const {
1098   vmIntrinsics::ID iid = intrinsic_id();
1099   return (MethodHandles::is_signature_polymorphic(iid) &&
1100           MethodHandles::has_member_arg(iid));
1101 }
1102 
1103 // Make an instance of a signature-polymorphic internal MH primitive.
1104 methodHandle Method::make_method_handle_intrinsic(vmIntrinsics::ID iid,
1105                                                          Symbol* signature,
1106                                                          TRAPS) {
1107   ResourceMark rm;
1108   methodHandle empty;
1109 
1110   KlassHandle holder = SystemDictionary::MethodHandle_klass();
1111   Symbol* name = MethodHandles::signature_polymorphic_intrinsic_name(iid);
1112   assert(iid == MethodHandles::signature_polymorphic_name_id(name), "");
1113   if (TraceMethodHandles) {
1114     tty->print_cr("make_method_handle_intrinsic MH.%s%s", name->as_C_string(), signature->as_C_string());
1115   }
1116 
1117   // invariant:   cp->symbol_at_put is preceded by a refcount increment (more usually a lookup)
1118   name->increment_refcount();
1119   signature->increment_refcount();
1120 
1121   int cp_length = _imcp_limit;
1122   ClassLoaderData* loader_data = holder->class_loader_data();
1123   constantPoolHandle cp;
1124   {
1125     ConstantPool* cp_oop = ConstantPool::allocate(loader_data, cp_length, CHECK_(empty));
1126     cp = constantPoolHandle(THREAD, cp_oop);
1127   }
1128   cp->set_pool_holder(InstanceKlass::cast(holder()));
1129   cp->symbol_at_put(_imcp_invoke_name,       name);
1130   cp->symbol_at_put(_imcp_invoke_signature,  signature);
1131   cp->set_has_preresolution();
1132 
1133   // decide on access bits:  public or not?
1134   int flags_bits = (JVM_ACC_NATIVE | JVM_ACC_SYNTHETIC | JVM_ACC_FINAL);
1135   bool must_be_static = MethodHandles::is_signature_polymorphic_static(iid);
1136   if (must_be_static)  flags_bits |= JVM_ACC_STATIC;
1137   assert((flags_bits & JVM_ACC_PUBLIC) == 0, "do not expose these methods");
1138 
1139   methodHandle m;
1140   {
1141     InlineTableSizes sizes;
1142     Method* m_oop = Method::allocate(loader_data, 0,
1143                                      accessFlags_from(flags_bits), &sizes,
1144                                      ConstMethod::NORMAL, CHECK_(empty));
1145     m = methodHandle(THREAD, m_oop);
1146   }
1147   m->set_constants(cp());
1148   m->set_name_index(_imcp_invoke_name);
1149   m->set_signature_index(_imcp_invoke_signature);
1150   assert(MethodHandles::is_signature_polymorphic_name(m->name()), "");
1151   assert(m->signature() == signature, "");
1152 #ifdef CC_INTERP
1153   ResultTypeFinder rtf(signature);
1154   m->set_result_index(rtf.type());
1155 #endif
1156   m->compute_size_of_parameters(THREAD);
1157   m->init_intrinsic_id();
1158   assert(m->is_method_handle_intrinsic(), "");
1159 #ifdef ASSERT
1160   if (!MethodHandles::is_signature_polymorphic(m->intrinsic_id()))  m->print();
1161   assert(MethodHandles::is_signature_polymorphic(m->intrinsic_id()), "must be an invoker");
1162   assert(m->intrinsic_id() == iid, "correctly predicted iid");
1163 #endif //ASSERT
1164 
1165   // Finally, set up its entry points.
1166   assert(m->can_be_statically_bound(), "");
1167   m->set_vtable_index(Method::nonvirtual_vtable_index);
1168   m->link_method(m, CHECK_(empty));
1169 
1170   if (TraceMethodHandles && (Verbose || WizardMode))
1171     m->print_on(tty);
1172 
1173   return m;
1174 }
1175 
1176 Klass* Method::check_non_bcp_klass(Klass* klass) {
1177   if (klass != NULL && klass->class_loader() != NULL) {
1178     if (klass->oop_is_objArray())
1179       klass = ObjArrayKlass::cast(klass)->bottom_klass();
1180     return klass;
1181   }
1182   return NULL;
1183 }
1184 
1185 
1186 methodHandle Method::clone_with_new_data(methodHandle m, u_char* new_code, int new_code_length,
1187                                                 u_char* new_compressed_linenumber_table, int new_compressed_linenumber_size, TRAPS) {
1188   // Code below does not work for native methods - they should never get rewritten anyway
1189   assert(!m->is_native(), "cannot rewrite native methods");
1190   // Allocate new Method*
1191   AccessFlags flags = m->access_flags();
1192 
1193   ConstMethod* cm = m->constMethod();
1194   int checked_exceptions_len = cm->checked_exceptions_length();
1195   int localvariable_len = cm->localvariable_table_length();
1196   int exception_table_len = cm->exception_table_length();
1197   int method_parameters_len = cm->method_parameters_length();
1198   int method_annotations_len = cm->method_annotations_length();
1199   int parameter_annotations_len = cm->parameter_annotations_length();
1200   int type_annotations_len = cm->type_annotations_length();
1201   int default_annotations_len = cm->default_annotations_length();
1202 
1203   InlineTableSizes sizes(
1204       localvariable_len,
1205       new_compressed_linenumber_size,
1206       exception_table_len,
1207       checked_exceptions_len,
1208       method_parameters_len,
1209       cm->generic_signature_index(),
1210       method_annotations_len,
1211       parameter_annotations_len,
1212       type_annotations_len,
1213       default_annotations_len,
1214       0);
1215 
1216   ClassLoaderData* loader_data = m->method_holder()->class_loader_data();
1217   Method* newm_oop = Method::allocate(loader_data,
1218                                       new_code_length,
1219                                       flags,
1220                                       &sizes,
1221                                       m->method_type(),
1222                                       CHECK_(methodHandle()));
1223   methodHandle newm (THREAD, newm_oop);
1224   int new_method_size = newm->method_size();
1225 
1226   // Create a shallow copy of Method part, but be careful to preserve the new ConstMethod*
1227   ConstMethod* newcm = newm->constMethod();
1228   int new_const_method_size = newm->constMethod()->size();
1229 
1230   memcpy(newm(), m(), sizeof(Method));
1231 
1232   // Create shallow copy of ConstMethod.
1233   memcpy(newcm, m->constMethod(), sizeof(ConstMethod));
1234 
1235   // Reset correct method/const method, method size, and parameter info
1236   newm->set_constMethod(newcm);
1237   newm->constMethod()->set_code_size(new_code_length);
1238   newm->constMethod()->set_constMethod_size(new_const_method_size);
1239   newm->set_method_size(new_method_size);
1240   assert(newm->code_size() == new_code_length, "check");
1241   assert(newm->method_parameters_length() == method_parameters_len, "check");
1242   assert(newm->checked_exceptions_length() == checked_exceptions_len, "check");
1243   assert(newm->exception_table_length() == exception_table_len, "check");
1244   assert(newm->localvariable_table_length() == localvariable_len, "check");
1245   // Copy new byte codes
1246   memcpy(newm->code_base(), new_code, new_code_length);
1247   // Copy line number table
1248   if (new_compressed_linenumber_size > 0) {
1249     memcpy(newm->compressed_linenumber_table(),
1250            new_compressed_linenumber_table,
1251            new_compressed_linenumber_size);
1252   }
1253   // Copy method_parameters
1254   if (method_parameters_len > 0) {
1255     memcpy(newm->method_parameters_start(),
1256            m->method_parameters_start(),
1257            method_parameters_len * sizeof(MethodParametersElement));
1258   }
1259   // Copy checked_exceptions
1260   if (checked_exceptions_len > 0) {
1261     memcpy(newm->checked_exceptions_start(),
1262            m->checked_exceptions_start(),
1263            checked_exceptions_len * sizeof(CheckedExceptionElement));
1264   }
1265   // Copy exception table
1266   if (exception_table_len > 0) {
1267     memcpy(newm->exception_table_start(),
1268            m->exception_table_start(),
1269            exception_table_len * sizeof(ExceptionTableElement));
1270   }
1271   // Copy local variable number table
1272   if (localvariable_len > 0) {
1273     memcpy(newm->localvariable_table_start(),
1274            m->localvariable_table_start(),
1275            localvariable_len * sizeof(LocalVariableTableElement));
1276   }
1277   // Copy stackmap table
1278   if (m->has_stackmap_table()) {
1279     int code_attribute_length = m->stackmap_data()->length();
1280     Array<u1>* stackmap_data =
1281       MetadataFactory::new_array<u1>(loader_data, code_attribute_length, 0, CHECK_NULL);
1282     memcpy((void*)stackmap_data->adr_at(0),
1283            (void*)m->stackmap_data()->adr_at(0), code_attribute_length);
1284     newm->set_stackmap_data(stackmap_data);
1285   }
1286 
1287   // copy annotations over to new method
1288   newcm->copy_annotations_from(cm);
1289   return newm;
1290 }
1291 
1292 vmSymbols::SID Method::klass_id_for_intrinsics(Klass* holder) {
1293   // If the loader is not the default loader (i.e., != NULL), we can't know the intrinsics
1294   // because we are not loading from core libraries.
1295   // Exception: the AES intrinsics come from lib/ext/sunjce_provider.jar,
1296   // which does not use the default class loader, so we check for its loader here.
1297   InstanceKlass* ik = InstanceKlass::cast(holder);
1298   if ((ik->class_loader() != NULL) && !SystemDictionary::is_ext_class_loader(ik->class_loader())) {
1299     return vmSymbols::NO_SID;   // regardless of name, no intrinsics here
1300   }
1301 
1302   // see if the klass name is well-known:
1303   Symbol* klass_name = ik->name();
1304   return vmSymbols::find_sid(klass_name);
1305 }
1306 
1307 void Method::init_intrinsic_id() {
1308   assert(_intrinsic_id == vmIntrinsics::_none, "do this just once");
1309   const uintptr_t max_id_uint = right_n_bits((int)(sizeof(_intrinsic_id) * BitsPerByte));
1310   assert((uintptr_t)vmIntrinsics::ID_LIMIT <= max_id_uint, "else fix size");
1311   assert(intrinsic_id_size_in_bytes() == sizeof(_intrinsic_id), "");
1312 
1313   // the klass name is well-known:
1314   vmSymbols::SID klass_id = klass_id_for_intrinsics(method_holder());
1315   assert(klass_id != vmSymbols::NO_SID, "caller responsibility");
1316 
1317   // ditto for method and signature:
1318   vmSymbols::SID  name_id = vmSymbols::find_sid(name());
1319   if (klass_id != vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_invoke_MethodHandle)
1320       && name_id == vmSymbols::NO_SID)
1321     return;
1322   vmSymbols::SID   sig_id = vmSymbols::find_sid(signature());
1323   if (klass_id != vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_invoke_MethodHandle)
1324       && sig_id == vmSymbols::NO_SID)  return;
1325   jshort flags = access_flags().as_short();
1326 
1327   vmIntrinsics::ID id = vmIntrinsics::find_id(klass_id, name_id, sig_id, flags);
1328   if (id != vmIntrinsics::_none) {
1329     set_intrinsic_id(id);
1330     if (id == vmIntrinsics::_Class_cast) {
1331       // Even if the intrinsic is rejected, we want to inline this simple method.
1332       set_force_inline(true);
1333     }
1334     return;
1335   }
1336 
1337   // A few slightly irregular cases:
1338   switch (klass_id) {
1339   case vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_StrictMath):
1340     // Second chance: check in regular Math.
1341     switch (name_id) {
1342     case vmSymbols::VM_SYMBOL_ENUM_NAME(min_name):
1343     case vmSymbols::VM_SYMBOL_ENUM_NAME(max_name):
1344     case vmSymbols::VM_SYMBOL_ENUM_NAME(sqrt_name):
1345       // pretend it is the corresponding method in the non-strict class:
1346       klass_id = vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_Math);
1347       id = vmIntrinsics::find_id(klass_id, name_id, sig_id, flags);
1348       break;
1349     }
1350     break;
1351 
1352   // Signature-polymorphic methods: MethodHandle.invoke*, InvokeDynamic.*.
1353   case vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_invoke_MethodHandle):
1354     if (!is_native())  break;
1355     id = MethodHandles::signature_polymorphic_name_id(method_holder(), name());
1356     if (is_static() != MethodHandles::is_signature_polymorphic_static(id))
1357       id = vmIntrinsics::_none;
1358     break;
1359   }
1360 
1361   if (id != vmIntrinsics::_none) {
1362     // Set up its iid.  It is an alias method.
1363     set_intrinsic_id(id);
1364     return;
1365   }
1366 }
1367 
1368 // These two methods are static since a GC may move the Method
1369 bool Method::load_signature_classes(methodHandle m, TRAPS) {
1370   if (THREAD->is_Compiler_thread()) {
1371     // There is nothing useful this routine can do from within the Compile thread.
1372     // Hopefully, the signature contains only well-known classes.
1373     // We could scan for this and return true/false, but the caller won't care.
1374     return false;
1375   }
1376   bool sig_is_loaded = true;
1377   Handle class_loader(THREAD, m->method_holder()->class_loader());
1378   Handle protection_domain(THREAD, m->method_holder()->protection_domain());
1379   ResourceMark rm(THREAD);
1380   Symbol*  signature = m->signature();
1381   for(SignatureStream ss(signature); !ss.is_done(); ss.next()) {
1382     if (ss.is_object()) {
1383       Symbol* sym = ss.as_symbol(CHECK_(false));
1384       Symbol*  name  = sym;
1385       Klass* klass = SystemDictionary::resolve_or_null(name, class_loader,
1386                                              protection_domain, THREAD);
1387       // We are loading classes eagerly. If a ClassNotFoundException or
1388       // a LinkageError was generated, be sure to ignore it.
1389       if (HAS_PENDING_EXCEPTION) {
1390         if (PENDING_EXCEPTION->is_a(SystemDictionary::ClassNotFoundException_klass()) ||
1391             PENDING_EXCEPTION->is_a(SystemDictionary::LinkageError_klass())) {
1392           CLEAR_PENDING_EXCEPTION;
1393         } else {
1394           return false;
1395         }
1396       }
1397       if (klass == NULL) { sig_is_loaded = false; }
1398     }
1399   }
1400   return sig_is_loaded;
1401 }
1402 
1403 bool Method::has_unloaded_classes_in_signature(methodHandle m, TRAPS) {
1404   Handle class_loader(THREAD, m->method_holder()->class_loader());
1405   Handle protection_domain(THREAD, m->method_holder()->protection_domain());
1406   ResourceMark rm(THREAD);
1407   Symbol*  signature = m->signature();
1408   for(SignatureStream ss(signature); !ss.is_done(); ss.next()) {
1409     if (ss.type() == T_OBJECT) {
1410       Symbol* name = ss.as_symbol_or_null();
1411       if (name == NULL) return true;
1412       Klass* klass = SystemDictionary::find(name, class_loader, protection_domain, THREAD);
1413       if (klass == NULL) return true;
1414     }
1415   }
1416   return false;
1417 }
1418 
1419 // Exposed so field engineers can debug the VM
1420 void Method::print_short_name(outputStream* st) {
1421   ResourceMark rm;
1422 #ifdef PRODUCT
1423   st->print(" %s::", method_holder()->external_name());
1424 #else
1425   st->print(" %s::", method_holder()->internal_name());
1426 #endif
1427   name()->print_symbol_on(st);
1428   if (WizardMode) signature()->print_symbol_on(st);
1429   else if (MethodHandles::is_signature_polymorphic(intrinsic_id()))
1430     MethodHandles::print_as_basic_type_signature_on(st, signature(), true);
1431 }
1432 
1433 // Comparator for sorting an object array containing
1434 // Method*s.
1435 static int method_comparator(Method* a, Method* b) {
1436   return a->name()->fast_compare(b->name());
1437 }
1438 
1439 // This is only done during class loading, so it is OK to assume method_idnum matches the methods() array
1440 // default_methods also uses this, without the idnum reordering, to enable fast find_method lookups
1441 void Method::sort_methods(Array<Method*>* methods, bool idempotent, bool set_idnums) {
1442   int length = methods->length();
1443   if (length > 1) {
1444     {
1445       No_Safepoint_Verifier nsv;
1446       QuickSort::sort<Method*>(methods->data(), length, method_comparator, idempotent);
1447     }
1448     // Reset method ordering
1449     if (set_idnums) {
1450       for (int i = 0; i < length; i++) {
1451         Method* m = methods->at(i);
1452         m->set_method_idnum(i);
1453       }
1454     }
1455   }
1456 }
1457 
1458 //-----------------------------------------------------------------------------------
1459 // Non-product code unless JVM/TI needs it
1460 
1461 #if !defined(PRODUCT) || INCLUDE_JVMTI
1462 class SignatureTypePrinter : public SignatureTypeNames {
1463  private:
1464   outputStream* _st;
1465   bool _use_separator;
1466 
1467   void type_name(const char* name) {
1468     if (_use_separator) _st->print(", ");
1469     _st->print("%s", name);
1470     _use_separator = true;
1471   }
1472 
1473  public:
1474   SignatureTypePrinter(Symbol* signature, outputStream* st) : SignatureTypeNames(signature) {
1475     _st = st;
1476     _use_separator = false;
1477   }
1478 
1479   void print_parameters()              { _use_separator = false; iterate_parameters(); }
1480   void print_returntype()              { _use_separator = false; iterate_returntype(); }
1481 };
1482 
1483 
1484 void Method::print_name(outputStream* st) {
1485   Thread *thread = Thread::current();
1486   ResourceMark rm(thread);
1487   SignatureTypePrinter sig(signature(), st);
1488   st->print("%s ", is_static() ? "static" : "virtual");
1489   sig.print_returntype();
1490   st->print(" %s.", method_holder()->internal_name());
1491   name()->print_symbol_on(st);
1492   st->print("(");
1493   sig.print_parameters();
1494   st->print(")");
1495 }
1496 #endif // !PRODUCT || INCLUDE_JVMTI
1497 
1498 
1499 void Method::print_codes_on(outputStream* st) const {
1500   print_codes_on(0, code_size(), st);
1501 }
1502 
1503 void Method::print_codes_on(int from, int to, outputStream* st) const {
1504   Thread *thread = Thread::current();
1505   ResourceMark rm(thread);
1506   methodHandle mh (thread, (Method*)this);
1507   BytecodeStream s(mh);
1508   s.set_interval(from, to);
1509   BytecodeTracer::set_closure(BytecodeTracer::std_closure());
1510   while (s.next() >= 0) BytecodeTracer::trace(mh, s.bcp(), st);
1511 }
1512 
1513 
1514 // Simple compression of line number tables. We use a regular compressed stream, except that we compress deltas
1515 // between (bci,line) pairs since they are smaller. If (bci delta, line delta) fits in (5-bit unsigned, 3-bit unsigned),
1516 // we save it as one byte; otherwise we write a 0xFF escape character and use regular compression. 0x0 is used
1517 // as the end-of-stream terminator.
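     // For example (illustrative values only): a delta pair (bci_delta = 4, line_delta = 1)
     // fits the 5/3-bit limits and is written as the single byte (4 << 3) | 1 = 0x21, while a
     // pair such as (bci_delta = 40, line_delta = 2) does not fit and is written as the 0xFF
     // escape byte followed by the two regularly compressed signed ints.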
1518 
1519 void CompressedLineNumberWriteStream::write_pair_regular(int bci_delta, int line_delta) {
1520   // The bci and line number deltas do not compress into a single byte.
1521   // Write out the escape character and use regular compression for bci and line number.
1522   write_byte((jubyte)0xFF);
1523   write_signed_int(bci_delta);
1524   write_signed_int(line_delta);
1525 }
1526 
1527 // See comment in method.hpp which explains why this exists.
1528 #if defined(_M_AMD64) && _MSC_VER >= 1400
1529 #pragma optimize("", off)
1530 void CompressedLineNumberWriteStream::write_pair(int bci, int line) {
1531   write_pair_inline(bci, line);
1532 }
1533 #pragma optimize("", on)
1534 #endif
1535 
1536 CompressedLineNumberReadStream::CompressedLineNumberReadStream(u_char* buffer) : CompressedReadStream(buffer) {
1537   _bci = 0;
1538   _line = 0;
1539 };
1540 
1541 
1542 bool CompressedLineNumberReadStream::read_pair() {
1543   jubyte next = read_byte();
1544   // Check for terminator
1545   if (next == 0) return false;
1546   if (next == 0xFF) {
1547     // Escape character, regular compression used
1548     _bci  += read_signed_int();
1549     _line += read_signed_int();
1550   } else {
1551     // Single byte compression used
1552     _bci  += next >> 3;
1553     _line += next & 0x7;
1554   }
1555   return true;
1556 }
1557 
1558 
1559 Bytecodes::Code Method::orig_bytecode_at(int bci) const {
1560   BreakpointInfo* bp = method_holder()->breakpoints();
1561   for (; bp != NULL; bp = bp->next()) {
1562     if (bp->match(this, bci)) {
1563       return bp->orig_bytecode();
1564     }
1565   }
1566   {
1567     ResourceMark rm;
1568     fatal(err_msg("no original bytecode found in %s at bci %d", name_and_sig_as_C_string(), bci));
1569   }
1570   return Bytecodes::_shouldnotreachhere;
1571 }
1572 
1573 void Method::set_orig_bytecode_at(int bci, Bytecodes::Code code) {
1574   assert(code != Bytecodes::_breakpoint, "cannot patch breakpoints this way");
1575   BreakpointInfo* bp = method_holder()->breakpoints();
1576   for (; bp != NULL; bp = bp->next()) {
1577     if (bp->match(this, bci)) {
1578       bp->set_orig_bytecode(code);
1579       // and continue, in case there is more than one
1580     }
1581   }
1582 }
1583 
1584 void Method::set_breakpoint(int bci) {
1585   InstanceKlass* ik = method_holder();
1586   BreakpointInfo *bp = new BreakpointInfo(this, bci);
1587   bp->set_next(ik->breakpoints());
1588   ik->set_breakpoints(bp);
1589   // do this last:
1590   bp->set(this);
1591 }
1592 
1593 static void clear_matches(Method* m, int bci) {
1594   InstanceKlass* ik = m->method_holder();
1595   BreakpointInfo* prev_bp = NULL;
1596   BreakpointInfo* next_bp;
1597   for (BreakpointInfo* bp = ik->breakpoints(); bp != NULL; bp = next_bp) {
1598     next_bp = bp->next();
1599     // A bci value of -1 is used to delete all breakpoints in method m (e.g. clear_all_breakpoints).
1600     if (bci >= 0 ? bp->match(m, bci) : bp->match(m)) {
1601       // do this first:
1602       bp->clear(m);
1603       // unhook it
1604       if (prev_bp != NULL)
1605         prev_bp->set_next(next_bp);
1606       else
1607         ik->set_breakpoints(next_bp);
1608       delete bp;
1609       // When a class is redefined, JVMTI sets a breakpoint in all versions of EMCP
1610       // methods at the same location, so there can be multiple matching
1611       // (method_index, bci) BreakpointInfo nodes in the list. For a clear_breakpoint
1612       // request we delete just one breakpoint and keep the BreakpointInfo nodes of
1613       // the other method versions for future clear_breakpoint requests.
1614       // A bci value of -1 is used to clear all breakpoints (see clear_all_breakpoints),
1615       // which is called when the class is unloaded. In that case we delete the
1616       // breakpoint information for all versions of the method. We may not correctly
1617       // restore the original bytecode in every method version, but that is OK because
1618       // the class is being unloaded, so these methods won't be used anymore.
1619       if (bci >= 0) {
1620         break;
1621       }
1622     } else {
1623       // This one is a keeper.
1624       prev_bp = bp;
1625     }
1626   }
1627 }
1628 
1629 void Method::clear_breakpoint(int bci) {
1630   assert(bci >= 0, "");
1631   clear_matches(this, bci);
1632 }
1633 
1634 void Method::clear_all_breakpoints() {
1635   clear_matches(this, -1);
1636 }
1637 
1638 
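     // With TieredCompilation the invocation count is split between the MethodCounters
     // and the MethodData objects; the two parts are summed below, and the result
     // saturates at InvocationCounter::count_limit once either counter's carry bit is set.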
1639 int Method::invocation_count() {
1640   MethodCounters *mcs = method_counters();
1641   if (TieredCompilation) {
1642     MethodData* const mdo = method_data();
1643     if (((mcs != NULL) ? mcs->invocation_counter()->carry() : false) ||
1644         ((mdo != NULL) ? mdo->invocation_counter()->carry() : false)) {
1645       return InvocationCounter::count_limit;
1646     } else {
1647       return ((mcs != NULL) ? mcs->invocation_counter()->count() : 0) +
1648              ((mdo != NULL) ? mdo->invocation_counter()->count() : 0);
1649     }
1650   } else {
1651     return (mcs == NULL) ? 0 : mcs->invocation_counter()->count();
1652   }
1653 }
1654 
1655 int Method::backedge_count() {
1656   MethodCounters *mcs = method_counters();
1657   if (TieredCompilation) {
1658     MethodData* const mdo = method_data();
1659     if (((mcs != NULL) ? mcs->backedge_counter()->carry() : false) ||
1660         ((mdo != NULL) ? mdo->backedge_counter()->carry() : false)) {
1661       return InvocationCounter::count_limit;
1662     } else {
1663       return ((mcs != NULL) ? mcs->backedge_counter()->count() : 0) +
1664              ((mdo != NULL) ? mdo->backedge_counter()->count() : 0);
1665     }
1666   } else {
1667     return (mcs == NULL) ? 0 : mcs->backedge_counter()->count();
1668   }
1669 }
1670 
1671 int Method::highest_comp_level() const {
1672   const MethodCounters* mcs = method_counters();
1673   if (mcs != NULL) {
1674     return mcs->highest_comp_level();
1675   } else {
1676     return CompLevel_none;
1677   }
1678 }
1679 
1680 int Method::highest_osr_comp_level() const {
1681   const MethodCounters* mcs = method_counters();
1682   if (mcs != NULL) {
1683     return mcs->highest_osr_comp_level();
1684   } else {
1685     return CompLevel_none;
1686   }
1687 }
1688 
1689 void Method::set_highest_comp_level(int level) {
1690   MethodCounters* mcs = method_counters();
1691   if (mcs != NULL) {
1692     mcs->set_highest_comp_level(level);
1693   }
1694 }
1695 
1696 void Method::set_highest_osr_comp_level(int level) {
1697   MethodCounters* mcs = method_counters();
1698   if (mcs != NULL) {
1699     mcs->set_highest_osr_comp_level(level);
1700   }
1701 }
1702 
1703 BreakpointInfo::BreakpointInfo(Method* m, int bci) {
1704   _bci = bci;
1705   _name_index = m->name_index();
1706   _signature_index = m->signature_index();
1707   _orig_bytecode = (Bytecodes::Code) *m->bcp_from(_bci);
1708   if (_orig_bytecode == Bytecodes::_breakpoint)
1709     _orig_bytecode = m->orig_bytecode_at(_bci);
1710   _next = NULL;
1711 }
1712 
1713 void BreakpointInfo::set(Method* method) {
1714 #ifdef ASSERT
1715   {
1716     Bytecodes::Code code = (Bytecodes::Code) *method->bcp_from(_bci);
1717     if (code == Bytecodes::_breakpoint)
1718       code = method->orig_bytecode_at(_bci);
1719     assert(orig_bytecode() == code, "original bytecode must be the same");
1720   }
1721 #endif
1722   Thread *thread = Thread::current();
1723   *method->bcp_from(_bci) = Bytecodes::_breakpoint;
1724   method->incr_number_of_breakpoints(thread);
1725   SystemDictionary::notice_modification();
1726   {
1727     // Deoptimize all dependents on this method
1728     HandleMark hm(thread);
1729     methodHandle mh(thread, method);
1730     Universe::flush_dependents_on_method(mh);
1731   }
1732 }
1733 
1734 void BreakpointInfo::clear(Method* method) {
1735   *method->bcp_from(_bci) = orig_bytecode();
1736   assert(method->number_of_breakpoints() > 0, "must not go negative");
1737   method->decr_number_of_breakpoints(Thread::current());
1738 }
1739 
1740 // jmethodID handling
1741 
1742 // This is a block-allocating object, sort of like JNIHandleBlock, only a
1743 // lot simpler.
1744 // It's allocated on the CHeap because once we allocate a jmethodID, we can
1745 // never get rid of it.
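     // A jmethodID handed out to JNI code is really a Method** pointing at a slot in one
     // of these blocks; when a class is redefined, the Method* in the slot is replaced
     // (see change_method_associated_with_jmethod_id below), so the jmethodID itself
     // stays valid.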
1746 
1747 static const int min_block_size = 8;
1748 
1749 class JNIMethodBlockNode : public CHeapObj<mtClass> {
1750   friend class JNIMethodBlock;
1751   Method**        _methods;
1752   int             _number_of_methods;
1753   int             _top;
1754   JNIMethodBlockNode* _next;
1755 
1756  public:
1757 
1758   JNIMethodBlockNode(int num_methods = min_block_size);
1759 
1760   ~JNIMethodBlockNode() { FREE_C_HEAP_ARRAY(Method*, _methods, mtInternal); }
1761 
1762   void ensure_methods(int num_addl_methods) {
1763     if (_top < _number_of_methods) {
1764       num_addl_methods -= _number_of_methods - _top;
1765       if (num_addl_methods <= 0) {
1766         return;
1767       }
1768     }
1769     if (_next == NULL) {
1770       _next = new JNIMethodBlockNode(MAX2(num_addl_methods, min_block_size));
1771     } else {
1772       _next->ensure_methods(num_addl_methods);
1773     }
1774   }
1775 };
1776 
1777 class JNIMethodBlock : public CHeapObj<mtClass> {
1778   JNIMethodBlockNode _head;
1779   JNIMethodBlockNode *_last_free;
1780  public:
1781   static Method* const _free_method;
1782 
1783   JNIMethodBlock(int initial_capacity = min_block_size)
1784       : _head(initial_capacity), _last_free(&_head) {}
1785 
1786   void ensure_methods(int num_addl_methods) {
1787     _last_free->ensure_methods(num_addl_methods);
1788   }
1789 
1790   Method** add_method(Method* m) {
1791     for (JNIMethodBlockNode* b = _last_free; b != NULL; b = b->_next) {
1792       if (b->_top < b->_number_of_methods) {
1793         // _top points to the next free entry.
1794         int i = b->_top;
1795         b->_methods[i] = m;
1796         b->_top++;
1797         _last_free = b;
1798         return &(b->_methods[i]);
1799       } else if (b->_top == b->_number_of_methods) {
1800         // If the next free entry ran off the end of the block, see if there's a freed entry to reuse.
1801         for (int i = 0; i < b->_number_of_methods; i++) {
1802           if (b->_methods[i] == _free_method) {
1803             b->_methods[i] = m;
1804             _last_free = b;
1805             return &(b->_methods[i]);
1806           }
1807         }
1808         // Only check each block once for frees.  They're very unlikely.
1809         // Increment top past the end of the block.
1810         b->_top++;
1811       }
1812       // need to allocate a next block.
1813       if (b->_next == NULL) {
1814         b->_next = _last_free = new JNIMethodBlockNode();
1815       }
1816     }
1817     guarantee(false, "Should always allocate a free block");
1818     return NULL;
1819   }
1820 
1821   bool contains(Method** m) {
1822     if (m == NULL) return false;
1823     for (JNIMethodBlockNode* b = &_head; b != NULL; b = b->_next) {
1824       if (b->_methods <= m && m < b->_methods + b->_number_of_methods) {
1825         // This is a bit of extra checking, for two reasons.  One is
1826         // that contains() deals with pointers that are passed in by
1827         // JNI code, so making sure that the pointer is aligned
1828         // correctly is valuable.  The other is that <= and > are
1829         // technically not defined on pointers, so the if guard can
1830         // pass spuriously; no modern compiler is likely to make that
1831         // a problem, though (and if one did, the guard could also
1832         // fail spuriously, which would be bad).
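             // The pointer subtraction below divides by sizeof(Method*), so re-adding
             // idx only reproduces m when m is properly aligned within the block; a
             // misaligned pointer passed in from JNI is therefore rejected here.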
1833         ptrdiff_t idx = m - b->_methods;
1834         if (b->_methods + idx == m) {
1835           return true;
1836         }
1837       }
1838     }
1839     return false;  // not found
1840   }
1841 
1842   // Doesn't really destroy it, just marks it as free so it can be reused.
1843   void destroy_method(Method** m) {
1844 #ifdef ASSERT
1845     assert(contains(m), "should be a methodID");
1846 #endif // ASSERT
1847     *m = _free_method;
1848   }
1849 
1850   // During class unloading the methods are cleared, which is different
1851   // from being freed.
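       // A cleared (NULL) slot makes any outstanding jmethodID resolve to NULL via
       // checked_resolve_jmethod_id(), whereas a freed slot (_free_method) can be
       // handed out again by add_method().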
1852   void clear_all_methods() {
1853     for (JNIMethodBlockNode* b = &_head; b != NULL; b = b->_next) {
1854       for (int i = 0; i< b->_number_of_methods; i++) {
1855         b->_methods[i] = NULL;
1856       }
1857     }
1858   }
1859 #ifndef PRODUCT
1860   int count_methods() {
1861     // count all allocated methods
1862     int count = 0;
1863     for (JNIMethodBlockNode* b = &_head; b != NULL; b = b->_next) {
1864       for (int i = 0; i< b->_number_of_methods; i++) {
1865         if (b->_methods[i] != _free_method) count++;
1866       }
1867     }
1868     return count;
1869   }
1870 #endif // PRODUCT
1871 };
1872 
1873 // Something that can't be mistaken for an address or a markOop
1874 Method* const JNIMethodBlock::_free_method = (Method*)55;
1875 
1876 JNIMethodBlockNode::JNIMethodBlockNode(int num_methods) : _next(NULL), _top(0) {
1877   _number_of_methods = MAX2(num_methods, min_block_size);
1878   _methods = NEW_C_HEAP_ARRAY(Method*, _number_of_methods, mtInternal);
1879   for (int i = 0; i < _number_of_methods; i++) {
1880     _methods[i] = JNIMethodBlock::_free_method;
1881   }
1882 }
1883 
1884 void Method::ensure_jmethod_ids(ClassLoaderData* loader_data, int capacity) {
1885   ClassLoaderData* cld = loader_data;
1886   if (!SafepointSynchronize::is_at_safepoint()) {
1887     // Have to add jmethod_ids() to class loader data thread-safely.
1888     // Also have to add the method to the list safely, which the cld lock
1889     // protects as well.
1890     MutexLockerEx ml(cld->metaspace_lock(),  Mutex::_no_safepoint_check_flag);
1891     if (cld->jmethod_ids() == NULL) {
1892       cld->set_jmethod_ids(new JNIMethodBlock(capacity));
1893     } else {
1894       cld->jmethod_ids()->ensure_methods(capacity);
1895     }
1896   } else {
1897     // At safepoint, we are single threaded and can set this.
1898     if (cld->jmethod_ids() == NULL) {
1899       cld->set_jmethod_ids(new JNIMethodBlock(capacity));
1900     } else {
1901       cld->jmethod_ids()->ensure_methods(capacity);
1902     }
1903   }
1904 }
1905 
1906 // Add a method id to the jmethod_ids
1907 jmethodID Method::make_jmethod_id(ClassLoaderData* loader_data, Method* m) {
1908   ClassLoaderData* cld = loader_data;
1909 
1910   if (!SafepointSynchronize::is_at_safepoint()) {
1911     // Have to add jmethod_ids() to class loader data thread-safely.
1912     // Also have to add the method to the list safely, which the cld lock
1913     // protects as well.
1914     MutexLockerEx ml(cld->metaspace_lock(),  Mutex::_no_safepoint_check_flag);
1915     if (cld->jmethod_ids() == NULL) {
1916       cld->set_jmethod_ids(new JNIMethodBlock());
1917     }
1918     // jmethodID is a pointer to Method*
1919     return (jmethodID)cld->jmethod_ids()->add_method(m);
1920   } else {
1921     // At safepoint, we are single threaded and can set this.
1922     if (cld->jmethod_ids() == NULL) {
1923       cld->set_jmethod_ids(new JNIMethodBlock());
1924     }
1925     // jmethodID is a pointer to Method*
1926     return (jmethodID)cld->jmethod_ids()->add_method(m);
1927   }
1928 }
1929 
1930 // Mark a jmethodID as free.  This is called when there is a data race in
1931 // InstanceKlass while creating the jmethodID cache.
1932 void Method::destroy_jmethod_id(ClassLoaderData* loader_data, jmethodID m) {
1933   ClassLoaderData* cld = loader_data;
1934   Method** ptr = (Method**)m;
1935   assert(cld->jmethod_ids() != NULL, "should have method handles");
1936   cld->jmethod_ids()->destroy_method(ptr);
1937 }
1938 
1939 void Method::change_method_associated_with_jmethod_id(jmethodID jmid, Method* new_method) {
1940   // Can't assert the method_holder is the same because the new method has the
1941   // scratch method holder.
1942   assert(resolve_jmethod_id(jmid)->method_holder()->class_loader()
1943            == new_method->method_holder()->class_loader(),
1944          "changing to a different class loader");
1945   // Just change the method in place, jmethodID pointer doesn't change.
1946   *((Method**)jmid) = new_method;
1947 }
1948 
1949 bool Method::is_method_id(jmethodID mid) {
1950   Method* m = resolve_jmethod_id(mid);
1951   assert(m != NULL, "should be called with non-null method");
1952   InstanceKlass* ik = m->method_holder();
1953   ClassLoaderData* cld = ik->class_loader_data();
1954   if (cld->jmethod_ids() == NULL) return false;
1955   return (cld->jmethod_ids()->contains((Method**)mid));
1956 }
1957 
1958 Method* Method::checked_resolve_jmethod_id(jmethodID mid) {
1959   if (mid == NULL) return NULL;
1960   Method* o = resolve_jmethod_id(mid);
1961   if (o == NULL || o == JNIMethodBlock::_free_method || !((Metadata*)o)->is_method()) {
1962     return NULL;
1963   }
1964   return o;
1965 };
1966 
1967 void Method::set_on_stack(const bool value) {
1968   // Set the flag on both the method itself and its constant pool.  The constant pool
1969   // being on the stack means some method referring to it is also on the stack.
1970   constants()->set_on_stack(value);
1971 
1972   bool succeeded = _access_flags.set_on_stack(value);
1973   if (value && succeeded) {
1974     MetadataOnStackMark::record(this, Thread::current());
1975   }
1976 }
1977 
1978 // Called when the class loader is unloaded to make all methods weak.
1979 void Method::clear_jmethod_ids(ClassLoaderData* loader_data) {
1980   loader_data->jmethod_ids()->clear_all_methods();
1981 }
1982 
1983 bool Method::has_method_vptr(const void* ptr) {
1984   Method m;
1985   // This assumes that the vtbl pointer is the first word of a C++ object.
1986   // This assumption is also in universe.cpp patch_klass_vtble
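       // A default-constructed Method on the stack provides the reference vtable
       // pointer to compare against.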
1987   void* vtbl2 = dereference_vptr((const void*)&m);
1988   void* this_vtbl = dereference_vptr(ptr);
1989   return vtbl2 == this_vtbl;
1990 }
1991 
1992 // Check that this pointer is valid by checking that the vtbl pointer matches
1993 bool Method::is_valid_method() const {
1994   if (this == NULL) {
1995     return false;
1996   } else if (!is_metaspace_object()) {
1997     return false;
1998   } else {
1999     return has_method_vptr((const void*)this);
2000   }
2001 }
2002 
2003 #ifndef PRODUCT
2004 void Method::print_jmethod_ids(ClassLoaderData* loader_data, outputStream* out) {
2005   out->print_cr("jni_method_id count = %d", loader_data->jmethod_ids()->count_methods());
2006 }
2007 #endif // PRODUCT
2008 
2009 
2010 // Printing
2011 
2012 #ifndef PRODUCT
2013 
2014 void Method::print_on(outputStream* st) const {
2015   ResourceMark rm;
2016   assert(is_method(), "must be method");
2017   st->print_cr("%s", internal_name());
2018   // get the effect of PrintOopAddress, always, for methods:
2019   st->print_cr(" - this oop:          "INTPTR_FORMAT, (intptr_t)this);
2020   st->print   (" - method holder:     "); method_holder()->print_value_on(st); st->cr();
2021   st->print   (" - constants:         "INTPTR_FORMAT" ", (address)constants());
2022   constants()->print_value_on(st); st->cr();
2023   st->print   (" - access:            0x%x  ", access_flags().as_int()); access_flags().print_on(st); st->cr();
2024   st->print   (" - name:              ");    name()->print_value_on(st); st->cr();
2025   st->print   (" - signature:         ");    signature()->print_value_on(st); st->cr();
2026   st->print_cr(" - max stack:         %d",   max_stack());
2027   st->print_cr(" - max locals:        %d",   max_locals());
2028   st->print_cr(" - size of params:    %d",   size_of_parameters());
2029   st->print_cr(" - method size:       %d",   method_size());
2030   if (intrinsic_id() != vmIntrinsics::_none)
2031     st->print_cr(" - intrinsic id:      %d %s", intrinsic_id(), vmIntrinsics::name_at(intrinsic_id()));
2032   if (highest_comp_level() != CompLevel_none)
2033     st->print_cr(" - highest level:     %d", highest_comp_level());
2034   st->print_cr(" - vtable index:      %d",   _vtable_index);
2035   st->print_cr(" - i2i entry:         " INTPTR_FORMAT, interpreter_entry());
2036   st->print(   " - adapters:          ");
2037   AdapterHandlerEntry* a = ((Method*)this)->adapter();
2038   if (a == NULL)
2039     st->print_cr(INTPTR_FORMAT, a);
2040   else
2041     a->print_adapter_on(st);
2042   st->print_cr(" - compiled entry     " INTPTR_FORMAT, from_compiled_entry());
2043   st->print_cr(" - code size:         %d",   code_size());
2044   if (code_size() != 0) {
2045     st->print_cr(" - code start:        " INTPTR_FORMAT, code_base());
2046     st->print_cr(" - code end (excl):   " INTPTR_FORMAT, code_base() + code_size());
2047   }
2048   if (method_data() != NULL) {
2049     st->print_cr(" - method data:       " INTPTR_FORMAT, (address)method_data());
2050   }
2051   st->print_cr(" - checked ex length: %d",   checked_exceptions_length());
2052   if (checked_exceptions_length() > 0) {
2053     CheckedExceptionElement* table = checked_exceptions_start();
2054     st->print_cr(" - checked ex start:  " INTPTR_FORMAT, table);
2055     if (Verbose) {
2056       for (int i = 0; i < checked_exceptions_length(); i++) {
2057         st->print_cr("   - throws %s", constants()->printable_name_at(table[i].class_cp_index));
2058       }
2059     }
2060   }
2061   if (has_linenumber_table()) {
2062     u_char* table = compressed_linenumber_table();
2063     st->print_cr(" - linenumber start:  " INTPTR_FORMAT, table);
2064     if (Verbose) {
2065       CompressedLineNumberReadStream stream(table);
2066       while (stream.read_pair()) {
2067         st->print_cr("   - line %d: %d", stream.line(), stream.bci());
2068       }
2069     }
2070   }
2071   st->print_cr(" - localvar length:   %d",   localvariable_table_length());
2072   if (localvariable_table_length() > 0) {
2073     LocalVariableTableElement* table = localvariable_table_start();
2074     st->print_cr(" - localvar start:    " INTPTR_FORMAT, table);
2075     if (Verbose) {
2076       for (int i = 0; i < localvariable_table_length(); i++) {
2077         int bci = table[i].start_bci;
2078         int len = table[i].length;
2079         const char* name = constants()->printable_name_at(table[i].name_cp_index);
2080         const char* desc = constants()->printable_name_at(table[i].descriptor_cp_index);
2081         int slot = table[i].slot;
2082         st->print_cr("   - %s %s bci=%d len=%d slot=%d", desc, name, bci, len, slot);
2083       }
2084     }
2085   }
2086   if (code() != NULL) {
2087     st->print   (" - compiled code: ");
2088     code()->print_value_on(st);
2089   }
2090   if (is_native()) {
2091     st->print_cr(" - native function:   " INTPTR_FORMAT, native_function());
2092     st->print_cr(" - signature handler: " INTPTR_FORMAT, signature_handler());
2093   }
2094 }
2095 
2096 #endif //PRODUCT
2097 
2098 void Method::print_value_on(outputStream* st) const {
2099   assert(is_method(), "must be method");
2100   st->print("%s", internal_name());
2101   print_address_on(st);
2102   st->print(" ");
2103   name()->print_value_on(st);
2104   st->print(" ");
2105   signature()->print_value_on(st);
2106   st->print(" in ");
2107   method_holder()->print_value_on(st);
2108   if (WizardMode) st->print("#%d", _vtable_index);
2109   if (WizardMode) st->print("[%d,%d]", size_of_parameters(), max_locals());
2110   if (WizardMode && code() != NULL) st->print(" ((nmethod*)%p)", code());
2111 }
2112 
2113 #if INCLUDE_SERVICES
2114 // Size Statistics
2115 void Method::collect_statistics(KlassSizeStats *sz) const {
2116   int mysize = sz->count(this);
2117   sz->_method_bytes += mysize;
2118   sz->_method_all_bytes += mysize;
2119   sz->_rw_bytes += mysize;
2120 
2121   if (constMethod()) {
2122     constMethod()->collect_statistics(sz);
2123   }
2124   if (method_data()) {
2125     method_data()->collect_statistics(sz);
2126   }
2127 }
2128 #endif // INCLUDE_SERVICES
2129 
2130 // Verification
2131 
2132 void Method::verify_on(outputStream* st) {
2133   guarantee(is_method(), "object must be method");
2134   guarantee(constants()->is_constantPool(), "should be constant pool");
2135   guarantee(constMethod()->is_constMethod(), "should be ConstMethod*");
2136   MethodData* md = method_data();
2137   guarantee(md == NULL ||
2138       md->is_methodData(), "should be method data");
2139 }