1 /*
   2  * Copyright (c) 2005, 2013, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "ci/ciArrayKlass.hpp"
  27 #include "ci/ciEnv.hpp"
  28 #include "ci/ciKlass.hpp"
  29 #include "ci/ciMethod.hpp"
  30 #include "code/dependencies.hpp"
  31 #include "compiler/compileLog.hpp"
  32 #include "oops/oop.inline.hpp"
  33 #include "runtime/handles.hpp"
  34 #include "runtime/handles.inline.hpp"
  35 #include "runtime/thread.inline.hpp"
  36 #include "utilities/copy.hpp"
  37 
  38 
  39 #ifdef ASSERT
  40 static bool must_be_in_vm() {
  41   Thread* thread = Thread::current();
  42   if (thread->is_Java_thread())
  43     return ((JavaThread*)thread)->thread_state() == _thread_in_vm;
  44   else
    return true;  // something like this: thread->is_VM_thread();
  46 }
  47 #endif //ASSERT
  48 
  49 void Dependencies::initialize(ciEnv* env) {
  50   Arena* arena = env->arena();
  51   _oop_recorder = env->oop_recorder();
  52   _log = env->log();
  53   _dep_seen = new(arena) GrowableArray<int>(arena, 500, 0, 0);
  54   DEBUG_ONLY(_deps[end_marker] = NULL);
  55   for (int i = (int)FIRST_TYPE; i < (int)TYPE_LIMIT; i++) {
  56     _deps[i] = new(arena) GrowableArray<ciBaseObject*>(arena, 10, 0, 0);
  57   }
  58   _content_bytes = NULL;
  59   _size_in_bytes = (size_t)-1;
  60 
  61   assert(TYPE_LIMIT <= (1<<LG2_TYPE_LIMIT), "sanity");
  62 }
  63 
  64 void Dependencies::assert_evol_method(ciMethod* m) {
  65   assert_common_1(evol_method, m);
  66 }
  67 
  68 void Dependencies::assert_leaf_type(ciKlass* ctxk) {
  69   if (ctxk->is_array_klass()) {
  70     // As a special case, support this assertion on an array type,
  71     // which reduces to an assertion on its element type.
  72     // Note that this cannot be done with assertions that
  73     // relate to concreteness or abstractness.
  74     ciType* elemt = ctxk->as_array_klass()->base_element_type();
  75     if (!elemt->is_instance_klass())  return;   // Ex:  int[][]
  76     ctxk = elemt->as_instance_klass();
  77     //if (ctxk->is_final())  return;            // Ex:  String[][]
  78   }
  79   check_ctxk(ctxk);
  80   assert_common_1(leaf_type, ctxk);
  81 }
  82 
  83 void Dependencies::assert_abstract_with_unique_concrete_subtype(ciKlass* ctxk, ciKlass* conck) {
  84   check_ctxk_abstract(ctxk);
  85   assert_common_2(abstract_with_unique_concrete_subtype, ctxk, conck);
  86 }
  87 
  88 void Dependencies::assert_abstract_with_no_concrete_subtype(ciKlass* ctxk) {
  89   check_ctxk_abstract(ctxk);
  90   assert_common_1(abstract_with_no_concrete_subtype, ctxk);
  91 }
  92 
  93 void Dependencies::assert_concrete_with_no_concrete_subtype(ciKlass* ctxk) {
  94   check_ctxk_concrete(ctxk);
  95   assert_common_1(concrete_with_no_concrete_subtype, ctxk);
  96 }
  97 
  98 void Dependencies::assert_unique_concrete_method(ciKlass* ctxk, ciMethod* uniqm) {
  99   check_ctxk(ctxk);
 100   assert_common_2(unique_concrete_method, ctxk, uniqm);
 101 }
 102 
 103 void Dependencies::assert_abstract_with_exclusive_concrete_subtypes(ciKlass* ctxk, ciKlass* k1, ciKlass* k2) {
 104   check_ctxk(ctxk);
 105   assert_common_3(abstract_with_exclusive_concrete_subtypes_2, ctxk, k1, k2);
 106 }
 107 
 108 void Dependencies::assert_exclusive_concrete_methods(ciKlass* ctxk, ciMethod* m1, ciMethod* m2) {
 109   check_ctxk(ctxk);
 110   assert_common_3(exclusive_concrete_methods_2, ctxk, m1, m2);
 111 }
 112 
 113 void Dependencies::assert_has_no_finalizable_subclasses(ciKlass* ctxk) {
 114   check_ctxk(ctxk);
 115   assert_common_1(no_finalizable_subclasses, ctxk);
 116 }
 117 
 118 void Dependencies::assert_call_site_target_value(ciCallSite* call_site, ciMethodHandle* method_handle) {
 119   check_ctxk(call_site->klass());
 120   assert_common_2(call_site_target_value, call_site, method_handle);
 121 }
 122 
// Helper function.  If we are adding a new dep. under ctxk2,
// try to find an old dep. under a broader* ctxk1.  If there is one,
// merge the two by keeping (or widening to) the broader context class.
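// For example (illustrative): if a dependency is already recorded under
// context java.util.AbstractList and the same assertion is later made under
// its subtype java.util.ArrayList, the existing record already subsumes it;
// if it is instead made under the supertype java.util.AbstractCollection,
// the recorded context is widened to that supertype.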
 126 bool Dependencies::maybe_merge_ctxk(GrowableArray<ciBaseObject*>* deps,
 127                                     int ctxk_i, ciKlass* ctxk2) {
 128   ciKlass* ctxk1 = deps->at(ctxk_i)->as_metadata()->as_klass();
 129   if (ctxk2->is_subtype_of(ctxk1)) {
 130     return true;  // success, and no need to change
 131   } else if (ctxk1->is_subtype_of(ctxk2)) {
 132     // new context class fully subsumes previous one
 133     deps->at_put(ctxk_i, ctxk2);
 134     return true;
 135   } else {
 136     return false;
 137   }
 138 }
 139 
 140 void Dependencies::assert_common_1(DepType dept, ciBaseObject* x) {
 141   assert(dep_args(dept) == 1, "sanity");
 142   log_dependency(dept, x);
 143   GrowableArray<ciBaseObject*>* deps = _deps[dept];
 144 
 145   // see if the same (or a similar) dep is already recorded
 146   if (note_dep_seen(dept, x)) {
 147     assert(deps->find(x) >= 0, "sanity");
 148   } else {
 149     deps->append(x);
 150   }
 151 }
 152 
 153 void Dependencies::assert_common_2(DepType dept,
 154                                    ciBaseObject* x0, ciBaseObject* x1) {
 155   assert(dep_args(dept) == 2, "sanity");
 156   log_dependency(dept, x0, x1);
 157   GrowableArray<ciBaseObject*>* deps = _deps[dept];
 158 
 159   // see if the same (or a similar) dep is already recorded
 160   bool has_ctxk = has_explicit_context_arg(dept);
 161   if (has_ctxk) {
 162     assert(dep_context_arg(dept) == 0, "sanity");
 163     if (note_dep_seen(dept, x1)) {
 164       // look in this bucket for redundant assertions
 165       const int stride = 2;
 166       for (int i = deps->length(); (i -= stride) >= 0; ) {
 167         ciBaseObject* y1 = deps->at(i+1);
 168         if (x1 == y1) {  // same subject; check the context
 169           if (maybe_merge_ctxk(deps, i+0, x0->as_metadata()->as_klass())) {
 170             return;
 171           }
 172         }
 173       }
 174     }
 175   } else {
 176     assert(dep_implicit_context_arg(dept) == 0, "sanity");
 177     if (note_dep_seen(dept, x0) && note_dep_seen(dept, x1)) {
 178       // look in this bucket for redundant assertions
 179       const int stride = 2;
 180       for (int i = deps->length(); (i -= stride) >= 0; ) {
 181         ciBaseObject* y0 = deps->at(i+0);
 182         ciBaseObject* y1 = deps->at(i+1);
 183         if (x0 == y0 && x1 == y1) {
 184           return;
 185         }
 186       }
 187     }
 188   }
 189 
 190   // append the assertion in the correct bucket:
 191   deps->append(x0);
 192   deps->append(x1);
 193 }
 194 
 195 void Dependencies::assert_common_3(DepType dept,
 196                                    ciKlass* ctxk, ciBaseObject* x, ciBaseObject* x2) {
 197   assert(dep_context_arg(dept) == 0, "sanity");
 198   assert(dep_args(dept) == 3, "sanity");
 199   log_dependency(dept, ctxk, x, x2);
 200   GrowableArray<ciBaseObject*>* deps = _deps[dept];
 201 
 202   // try to normalize an unordered pair:
 203   bool swap = false;
 204   switch (dept) {
 205   case abstract_with_exclusive_concrete_subtypes_2:
 206     swap = (x->ident() > x2->ident() && x->as_metadata()->as_klass() != ctxk);
 207     break;
 208   case exclusive_concrete_methods_2:
 209     swap = (x->ident() > x2->ident() && x->as_metadata()->as_method()->holder() != ctxk);
 210     break;
 211   }
 212   if (swap) { ciBaseObject* t = x; x = x2; x2 = t; }
 213 
 214   // see if the same (or a similar) dep is already recorded
 215   if (note_dep_seen(dept, x) && note_dep_seen(dept, x2)) {
 216     // look in this bucket for redundant assertions
 217     const int stride = 3;
 218     for (int i = deps->length(); (i -= stride) >= 0; ) {
 219       ciBaseObject* y  = deps->at(i+1);
 220       ciBaseObject* y2 = deps->at(i+2);
 221       if (x == y && x2 == y2) {  // same subjects; check the context
 222         if (maybe_merge_ctxk(deps, i+0, ctxk)) {
 223           return;
 224         }
 225       }
 226     }
 227   }
 228   // append the assertion in the correct bucket:
 229   deps->append(ctxk);
 230   deps->append(x);
 231   deps->append(x2);
 232 }
 233 
 234 /// Support for encoding dependencies into an nmethod:
 235 
 236 void Dependencies::copy_to(nmethod* nm) {
 237   address beg = nm->dependencies_begin();
 238   address end = nm->dependencies_end();
 239   guarantee(end - beg >= (ptrdiff_t) size_in_bytes(), "bad sizing");
 240   Copy::disjoint_words((HeapWord*) content_bytes(),
 241                        (HeapWord*) beg,
 242                        size_in_bytes() / sizeof(HeapWord));
 243   assert(size_in_bytes() % sizeof(HeapWord) == 0, "copy by words");
 244 }
 245 
 246 static int sort_dep(ciBaseObject** p1, ciBaseObject** p2, int narg) {
 247   for (int i = 0; i < narg; i++) {
 248     int diff = p1[i]->ident() - p2[i]->ident();
 249     if (diff != 0)  return diff;
 250   }
 251   return 0;
 252 }
 253 static int sort_dep_arg_1(ciBaseObject** p1, ciBaseObject** p2)
 254 { return sort_dep(p1, p2, 1); }
 255 static int sort_dep_arg_2(ciBaseObject** p1, ciBaseObject** p2)
 256 { return sort_dep(p1, p2, 2); }
 257 static int sort_dep_arg_3(ciBaseObject** p1, ciBaseObject** p2)
 258 { return sort_dep(p1, p2, 3); }
 259 
 260 void Dependencies::sort_all_deps() {
 261   for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
 262     DepType dept = (DepType)deptv;
 263     GrowableArray<ciBaseObject*>* deps = _deps[dept];
 264     if (deps->length() <= 1)  continue;
 265     switch (dep_args(dept)) {
 266     case 1: deps->sort(sort_dep_arg_1, 1); break;
 267     case 2: deps->sort(sort_dep_arg_2, 2); break;
 268     case 3: deps->sort(sort_dep_arg_3, 3); break;
 269     default: ShouldNotReachHere();
 270     }
 271   }
 272 }
 273 
 274 size_t Dependencies::estimate_size_in_bytes() {
 275   size_t est_size = 100;
 276   for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
 277     DepType dept = (DepType)deptv;
 278     GrowableArray<ciBaseObject*>* deps = _deps[dept];
 279     est_size += deps->length()*2;  // tags and argument(s)
 280   }
 281   return est_size;
 282 }
 283 
 284 ciKlass* Dependencies::ctxk_encoded_as_null(DepType dept, ciBaseObject* x) {
 285   switch (dept) {
 286   case abstract_with_exclusive_concrete_subtypes_2:
 287     return x->as_metadata()->as_klass();
 288   case unique_concrete_method:
 289   case exclusive_concrete_methods_2:
 290     return x->as_metadata()->as_method()->holder();
 291   }
 292   return NULL;  // let NULL be NULL
 293 }
 294 
 295 Klass* Dependencies::ctxk_encoded_as_null(DepType dept, Metadata* x) {
 296   assert(must_be_in_vm(), "raw oops here");
 297   switch (dept) {
 298   case abstract_with_exclusive_concrete_subtypes_2:
 299     assert(x->is_klass(), "sanity");
 300     return (Klass*) x;
 301   case unique_concrete_method:
 302   case exclusive_concrete_methods_2:
 303     assert(x->is_method(), "sanity");
 304     return ((Method*)x)->method_holder();
 305   }
 306   return NULL;  // let NULL be NULL
 307 }
 308 
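// Encoding summary (as implemented below): the records are grouped and sorted
// by DepType.  Each record starts with a tag byte holding the DepType,
// OR'ed with default_context_type_bit when the context klass can be
// recomputed from the following argument and is therefore omitted.  The tag
// is followed by one compressed int per remaining argument, each an index
// into the OopRecorder.  The stream ends with an end_marker byte and is
// padded with further end_marker bytes up to a HeapWord boundary.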
 309 void Dependencies::encode_content_bytes() {
 310   sort_all_deps();
 311 
 312   // cast is safe, no deps can overflow INT_MAX
 313   CompressedWriteStream bytes((int)estimate_size_in_bytes());
 314 
 315   for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
 316     DepType dept = (DepType)deptv;
 317     GrowableArray<ciBaseObject*>* deps = _deps[dept];
 318     if (deps->length() == 0)  continue;
 319     int stride = dep_args(dept);
 320     int ctxkj  = dep_context_arg(dept);  // -1 if no context arg
 321     assert(stride > 0, "sanity");
 322     for (int i = 0; i < deps->length(); i += stride) {
 323       jbyte code_byte = (jbyte)dept;
 324       int skipj = -1;
 325       if (ctxkj >= 0 && ctxkj+1 < stride) {
 326         ciKlass*  ctxk = deps->at(i+ctxkj+0)->as_metadata()->as_klass();
 327         ciBaseObject* x     = deps->at(i+ctxkj+1);  // following argument
 328         if (ctxk == ctxk_encoded_as_null(dept, x)) {
 329           skipj = ctxkj;  // we win:  maybe one less oop to keep track of
 330           code_byte |= default_context_type_bit;
 331         }
 332       }
 333       bytes.write_byte(code_byte);
 334       for (int j = 0; j < stride; j++) {
 335         if (j == skipj)  continue;
 336         ciBaseObject* v = deps->at(i+j);
 337         int idx;
 338         if (v->is_object()) {
 339           idx = _oop_recorder->find_index(v->as_object()->constant_encoding());
 340         } else {
 341           ciMetadata* meta = v->as_metadata();
 342           idx = _oop_recorder->find_index(meta->constant_encoding());
 343         }
 344         bytes.write_int(idx);
 345       }
 346     }
 347   }
 348 
 349   // write a sentinel byte to mark the end
 350   bytes.write_byte(end_marker);
 351 
 352   // round it out to a word boundary
 353   while (bytes.position() % sizeof(HeapWord) != 0) {
 354     bytes.write_byte(end_marker);
 355   }
 356 
 357   // check whether the dept byte encoding really works
 358   assert((jbyte)default_context_type_bit != 0, "byte overflow");
 359 
 360   _content_bytes = bytes.buffer();
 361   _size_in_bytes = bytes.position();
 362 }
 363 
 364 
 365 const char* Dependencies::_dep_name[TYPE_LIMIT] = {
 366   "end_marker",
 367   "evol_method",
 368   "leaf_type",
 369   "abstract_with_unique_concrete_subtype",
 370   "abstract_with_no_concrete_subtype",
 371   "concrete_with_no_concrete_subtype",
 372   "unique_concrete_method",
 373   "abstract_with_exclusive_concrete_subtypes_2",
 374   "exclusive_concrete_methods_2",
 375   "no_finalizable_subclasses",
 376   "call_site_target_value"
 377 };
 378 
 379 int Dependencies::_dep_args[TYPE_LIMIT] = {
 380   -1,// end_marker
 381   1, // evol_method m
 382   1, // leaf_type ctxk
 383   2, // abstract_with_unique_concrete_subtype ctxk, k
 384   1, // abstract_with_no_concrete_subtype ctxk
 385   1, // concrete_with_no_concrete_subtype ctxk
 386   2, // unique_concrete_method ctxk, m
  3, // abstract_with_exclusive_concrete_subtypes_2 ctxk, k1, k2
  3, // exclusive_concrete_methods_2 ctxk, m1, m2
 389   1, // no_finalizable_subclasses ctxk
 390   2  // call_site_target_value call_site, method_handle
 391 };
 392 
 393 const char* Dependencies::dep_name(Dependencies::DepType dept) {
 394   if (!dept_in_mask(dept, all_types))  return "?bad-dep?";
 395   return _dep_name[dept];
 396 }
 397 
 398 int Dependencies::dep_args(Dependencies::DepType dept) {
 399   if (!dept_in_mask(dept, all_types))  return -1;
 400   return _dep_args[dept];
 401 }
 402 
 403 void Dependencies::check_valid_dependency_type(DepType dept) {
 404   guarantee(FIRST_TYPE <= dept && dept < TYPE_LIMIT, err_msg("invalid dependency type: %d", (int) dept));
 405 }
 406 
 407 // for the sake of the compiler log, print out current dependencies:
 408 void Dependencies::log_all_dependencies() {
 409   if (log() == NULL)  return;
 410   ResourceMark rm;
 411   for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
 412     DepType dept = (DepType)deptv;
 413     GrowableArray<ciBaseObject*>* deps = _deps[dept];
 414     int deplen = deps->length();
 415     if (deplen == 0) {
 416       continue;
 417     }
 418     int stride = dep_args(dept);
 419     GrowableArray<ciBaseObject*>* ciargs = new GrowableArray<ciBaseObject*>(stride);
 420     for (int i = 0; i < deps->length(); i += stride) {
 421       for (int j = 0; j < stride; j++) {
 422         // flush out the identities before printing
 423         ciargs->push(deps->at(i+j));
 424       }
 425       write_dependency_to(log(), dept, ciargs);
 426       ciargs->clear();
 427     }
    guarantee(deplen == deps->length(), "deps array cannot grow inside nested ResourceMark scope");
 429   }
 430 }
 431 
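// The write_dependency_to overloads below emit one log element per dependency,
// roughly of the form <dependency type='unique_concrete_method' ctxk='..' x='..'/>,
// or <dependency_failed .../> with a witness object and a timestamp when a
// violating class has been found.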
 432 void Dependencies::write_dependency_to(CompileLog* log,
 433                                        DepType dept,
 434                                        GrowableArray<DepArgument>* args,
 435                                        Klass* witness) {
 436   if (log == NULL) {
 437     return;
 438   }
 439   ResourceMark rm;
 440   ciEnv* env = ciEnv::current();
 441   GrowableArray<ciBaseObject*>* ciargs = new GrowableArray<ciBaseObject*>(args->length());
 442   for (GrowableArrayIterator<DepArgument> it = args->begin(); it != args->end(); ++it) {
 443     DepArgument arg = *it;
 444     if (arg.is_oop()) {
 445       ciargs->push(env->get_object(arg.oop_value()));
 446     } else {
 447       ciargs->push(env->get_metadata(arg.metadata_value()));
 448     }
 449   }
 450   int argslen = ciargs->length();
 451   Dependencies::write_dependency_to(log, dept, ciargs, witness);
  guarantee(argslen == ciargs->length(), "ciargs array cannot grow inside nested ResourceMark scope");
 453 }
 454 
 455 void Dependencies::write_dependency_to(CompileLog* log,
 456                                        DepType dept,
 457                                        GrowableArray<ciBaseObject*>* args,
 458                                        Klass* witness) {
 459   if (log == NULL) {
 460     return;
 461   }
 462   ResourceMark rm;
 463   GrowableArray<int>* argids = new GrowableArray<int>(args->length());
 464   for (GrowableArrayIterator<ciBaseObject*> it = args->begin(); it != args->end(); ++it) {
 465     ciBaseObject* obj = *it;
 466     if (obj->is_object()) {
 467       argids->push(log->identify(obj->as_object()));
 468     } else {
 469       argids->push(log->identify(obj->as_metadata()));
 470     }
 471   }
 472   if (witness != NULL) {
 473     log->begin_elem("dependency_failed");
 474   } else {
 475     log->begin_elem("dependency");
 476   }
 477   log->print(" type='%s'", dep_name(dept));
 478   const int ctxkj = dep_context_arg(dept);  // -1 if no context arg
 479   if (ctxkj >= 0 && ctxkj < argids->length()) {
 480     log->print(" ctxk='%d'", argids->at(ctxkj));
 481   }
 482   // write remaining arguments, if any.
 483   for (int j = 0; j < argids->length(); j++) {
 484     if (j == ctxkj)  continue;  // already logged
 485     if (j == 1) {
 486       log->print(  " x='%d'",    argids->at(j));
 487     } else {
 488       log->print(" x%d='%d'", j, argids->at(j));
 489     }
 490   }
 491   if (witness != NULL) {
 492     log->object("witness", witness);
 493     log->stamp();
 494   }
 495   log->end_elem();
 496 }
 497 
 498 void Dependencies::write_dependency_to(xmlStream* xtty,
 499                                        DepType dept,
 500                                        GrowableArray<DepArgument>* args,
 501                                        Klass* witness) {
 502   if (xtty == NULL) {
 503     return;
 504   }
 505   ResourceMark rm;
 506   ttyLocker ttyl;
 507   int ctxkj = dep_context_arg(dept);  // -1 if no context arg
 508   if (witness != NULL) {
 509     xtty->begin_elem("dependency_failed");
 510   } else {
 511     xtty->begin_elem("dependency");
 512   }
 513   xtty->print(" type='%s'", dep_name(dept));
 514   if (ctxkj >= 0) {
 515     xtty->object("ctxk", args->at(ctxkj).metadata_value());
 516   }
 517   // write remaining arguments, if any.
 518   for (int j = 0; j < args->length(); j++) {
 519     if (j == ctxkj)  continue;  // already logged
 520     DepArgument arg = args->at(j);
 521     if (j == 1) {
 522       if (arg.is_oop()) {
 523         xtty->object("x", arg.oop_value());
 524       } else {
 525         xtty->object("x", arg.metadata_value());
 526       }
 527     } else {
 528       char xn[10]; sprintf(xn, "x%d", j);
 529       if (arg.is_oop()) {
 530         xtty->object(xn, arg.oop_value());
 531       } else {
 532         xtty->object(xn, arg.metadata_value());
 533       }
 534     }
 535   }
 536   if (witness != NULL) {
 537     xtty->object("witness", witness);
 538     xtty->stamp();
 539   }
 540   xtty->end_elem();
 541 }
 542 
 543 void Dependencies::print_dependency(DepType dept, GrowableArray<DepArgument>* args,
 544                                     Klass* witness) {
 545   ResourceMark rm;
 546   ttyLocker ttyl;   // keep the following output all in one block
 547   tty->print_cr("%s of type %s",
 548                 (witness == NULL)? "Dependency": "Failed dependency",
 549                 dep_name(dept));
 550   // print arguments
 551   int ctxkj = dep_context_arg(dept);  // -1 if no context arg
 552   for (int j = 0; j < args->length(); j++) {
 553     DepArgument arg = args->at(j);
 554     bool put_star = false;
 555     if (arg.is_null())  continue;
 556     const char* what;
 557     if (j == ctxkj) {
 558       assert(arg.is_metadata(), "must be");
 559       what = "context";
 560       put_star = !Dependencies::is_concrete_klass((Klass*)arg.metadata_value());
 561     } else if (arg.is_method()) {
 562       what = "method ";
 563       put_star = !Dependencies::is_concrete_method((Method*)arg.metadata_value());
 564     } else if (arg.is_klass()) {
 565       what = "class  ";
 566     } else {
 567       what = "object ";
 568     }
 569     tty->print("  %s = %s", what, (put_star? "*": ""));
 570     if (arg.is_klass()) {
 571       tty->print("%s", ((Klass*)arg.metadata_value())->external_name());
 572     } else if (arg.is_method()) {
 573       ((Method*)arg.metadata_value())->print_value();
 574     } else if (arg.is_oop()) {
 575       arg.oop_value()->print_value_on(tty);
 576     } else {
 577       ShouldNotReachHere(); // Provide impl for this type.
 578     }
 579 
 580     tty->cr();
 581   }
 582   if (witness != NULL) {
 583     bool put_star = !Dependencies::is_concrete_klass(witness);
 584     tty->print_cr("  witness = %s%s",
 585                   (put_star? "*": ""),
 586                   witness->external_name());
 587   }
 588 }
 589 
 590 void Dependencies::DepStream::log_dependency(Klass* witness) {
 591   if (_deps == NULL && xtty == NULL)  return;  // fast cutout for runtime
 592   ResourceMark rm;
 593   const int nargs = argument_count();
 594   GrowableArray<DepArgument>* args = new GrowableArray<DepArgument>(nargs);
 595   for (int j = 0; j < nargs; j++) {
 596     if (type() == call_site_target_value) {
 597       args->push(argument_oop(j));
 598     } else {
 599       args->push(argument(j));
 600     }
 601   }
 602   int argslen = args->length();
 603   if (_deps != NULL && _deps->log() != NULL) {
 604     Dependencies::write_dependency_to(_deps->log(), type(), args, witness);
 605   } else {
 606     Dependencies::write_dependency_to(xtty, type(), args, witness);
 607   }
  guarantee(argslen == args->length(), "args array cannot grow inside nested ResourceMark scope");
 609 }
 610 
 611 void Dependencies::DepStream::print_dependency(Klass* witness, bool verbose) {
 612   ResourceMark rm;
 613   int nargs = argument_count();
 614   GrowableArray<DepArgument>* args = new GrowableArray<DepArgument>(nargs);
 615   for (int j = 0; j < nargs; j++) {
 616     if (type() == call_site_target_value) {
 617       args->push(argument_oop(j));
 618     } else {
 619       args->push(argument(j));
 620     }
 621   }
 622   int argslen = args->length();
 623   Dependencies::print_dependency(type(), args, witness);
 624   if (verbose) {
 625     if (_code != NULL) {
 626       tty->print("  code: ");
 627       _code->print_value_on(tty);
 628       tty->cr();
 629     }
 630   }
  guarantee(argslen == args->length(), "args array cannot grow inside nested ResourceMark scope");
 632 }
 633 
 634 
 635 /// Dependency stream support (decodes dependencies from an nmethod):
 636 
 637 #ifdef ASSERT
 638 void Dependencies::DepStream::initial_asserts(size_t byte_limit) {
 639   assert(must_be_in_vm(), "raw oops here");
 640   _byte_limit = byte_limit;
 641   _type       = (DepType)(end_marker-1);  // defeat "already at end" assert
 642   assert((_code!=NULL) + (_deps!=NULL) == 1, "one or t'other");
 643 }
 644 #endif //ASSERT
 645 
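// next() decodes one record written by Dependencies::encode_content_bytes():
// it reads the tag byte, strips default_context_type_bit to recover the
// DepType, and then reads one compressed int per argument, leaving 0 in the
// slot of a context argument that was omitted during encoding.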
 646 bool Dependencies::DepStream::next() {
 647   assert(_type != end_marker, "already at end");
 648   if (_bytes.position() == 0 && _code != NULL
 649       && _code->dependencies_size() == 0) {
 650     // Method has no dependencies at all.
 651     return false;
 652   }
 653   int code_byte = (_bytes.read_byte() & 0xFF);
 654   if (code_byte == end_marker) {
 655     DEBUG_ONLY(_type = end_marker);
 656     return false;
 657   } else {
 658     int ctxk_bit = (code_byte & Dependencies::default_context_type_bit);
 659     code_byte -= ctxk_bit;
 660     DepType dept = (DepType)code_byte;
 661     _type = dept;
 662     Dependencies::check_valid_dependency_type(dept);
 663     int stride = _dep_args[dept];
 664     assert(stride == dep_args(dept), "sanity");
 665     int skipj = -1;
 666     if (ctxk_bit != 0) {
 667       skipj = 0;  // currently the only context argument is at zero
 668       assert(skipj == dep_context_arg(dept), "zero arg always ctxk");
 669     }
 670     for (int j = 0; j < stride; j++) {
 671       _xi[j] = (j == skipj)? 0: _bytes.read_int();
 672     }
 673     DEBUG_ONLY(_xi[stride] = -1);   // help detect overruns
 674     return true;
 675   }
 676 }
 677 
 678 inline Metadata* Dependencies::DepStream::recorded_metadata_at(int i) {
 679   Metadata* o = NULL;
 680   if (_code != NULL) {
 681     o = _code->metadata_at(i);
 682   } else {
 683     o = _deps->oop_recorder()->metadata_at(i);
 684   }
 685   return o;
 686 }
 687 
 688 inline oop Dependencies::DepStream::recorded_oop_at(int i) {
  return (_code != NULL)
         ? _code->oop_at(i)
         : JNIHandles::resolve(_deps->oop_recorder()->oop_at(i));
 692 }
 693 
 694 Metadata* Dependencies::DepStream::argument(int i) {
 695   Metadata* result = recorded_metadata_at(argument_index(i));
 696 
 697   if (result == NULL) { // Explicit context argument can be compressed
 698     int ctxkj = dep_context_arg(type());  // -1 if no explicit context arg
 699     if (ctxkj >= 0 && i == ctxkj && ctxkj+1 < argument_count()) {
 700       result = ctxk_encoded_as_null(type(), argument(ctxkj+1));
 701     }
 702   }
 703 
 704   assert(result == NULL || result->is_klass() || result->is_method(), "must be");
 705   return result;
 706 }
 707 
 708 /**
 709  * Returns a unique identifier for each dependency argument.
 710  */
 711 uintptr_t Dependencies::DepStream::get_identifier(int i) {
 712   if (has_oop_argument()) {
 713     return (uintptr_t)(oopDesc*)argument_oop(i);
 714   } else {
 715     return (uintptr_t)argument(i);
 716   }
 717 }
 718 
 719 oop Dependencies::DepStream::argument_oop(int i) {
 720   oop result = recorded_oop_at(argument_index(i));
 721   assert(result == NULL || result->is_oop(), "must be");
 722   return result;
 723 }
 724 
 725 Klass* Dependencies::DepStream::context_type() {
 726   assert(must_be_in_vm(), "raw oops here");
 727 
 728   // Most dependencies have an explicit context type argument.
 729   {
 730     int ctxkj = dep_context_arg(type());  // -1 if no explicit context arg
 731     if (ctxkj >= 0) {
 732       Metadata* k = argument(ctxkj);
 733       assert(k != NULL && k->is_klass(), "type check");
 734       return (Klass*)k;
 735     }
 736   }
 737 
 738   // Some dependencies are using the klass of the first object
 739   // argument as implicit context type (e.g. call_site_target_value).
 740   {
 741     int ctxkj = dep_implicit_context_arg(type());
 742     if (ctxkj >= 0) {
 743       Klass* k = argument_oop(ctxkj)->klass();
 744       assert(k != NULL && k->is_klass(), "type check");
 745       return (Klass*) k;
 746     }
 747   }
 748 
 749   // And some dependencies don't have a context type at all,
 750   // e.g. evol_method.
 751   return NULL;
 752 }
 753 
 754 // ----------------- DependencySignature --------------------------------------
 755 bool DependencySignature::equals(DependencySignature const& s1, DependencySignature const& s2) {
 756   if ((s1.type() != s2.type()) || (s1.args_count() != s2.args_count())) {
 757     return false;
 758   }
 759 
 760   for (int i = 0; i < s1.args_count(); i++) {
 761     if (s1.arg(i) != s2.arg(i)) {
 762       return false;
 763     }
 764   }
 765   return true;
 766 }
 767 
 768 /// Checking dependencies:
 769 
 770 // This hierarchy walker inspects subtypes of a given type,
 771 // trying to find a "bad" class which breaks a dependency.
 772 // Such a class is called a "witness" to the broken dependency.
 773 // While searching around, we ignore "participants", which
 774 // are already known to the dependency.
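// For example (illustrative): a dependency may record that C is the unique
// concrete subtype of an abstract class A, with C as the sole participant.
// If a new concrete class D that is not a subtype of C is later loaded under
// A, the walker reports D as the witness which invalidates that dependency.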
 775 class ClassHierarchyWalker {
 776  public:
 777   enum { PARTICIPANT_LIMIT = 3 };
 778 
 779  private:
 780   // optional method descriptor to check for:
 781   Symbol* _name;
 782   Symbol* _signature;
 783 
 784   // special classes which are not allowed to be witnesses:
 785   Klass*    _participants[PARTICIPANT_LIMIT+1];
 786   int       _num_participants;
 787 
 788   // cache of method lookups
 789   Method* _found_methods[PARTICIPANT_LIMIT+1];
 790 
 791   // if non-zero, tells how many witnesses to convert to participants
 792   int       _record_witnesses;
 793 
 794   void initialize(Klass* participant) {
 795     _record_witnesses = 0;
 796     _participants[0]  = participant;
 797     _found_methods[0] = NULL;
 798     _num_participants = 0;
 799     if (participant != NULL) {
 800       // Terminating NULL.
 801       _participants[1] = NULL;
 802       _found_methods[1] = NULL;
 803       _num_participants = 1;
 804     }
 805   }
 806 
 807   void initialize_from_method(Method* m) {
 808     assert(m != NULL && m->is_method(), "sanity");
 809     _name      = m->name();
 810     _signature = m->signature();
 811   }
 812 
 813  public:
 814   // The walker is initialized to recognize certain methods and/or types
 815   // as friendly participants.
 816   ClassHierarchyWalker(Klass* participant, Method* m) {
 817     initialize_from_method(m);
 818     initialize(participant);
 819   }
 820   ClassHierarchyWalker(Method* m) {
 821     initialize_from_method(m);
 822     initialize(NULL);
 823   }
 824   ClassHierarchyWalker(Klass* participant = NULL) {
 825     _name      = NULL;
 826     _signature = NULL;
 827     initialize(participant);
 828   }
 829 
 830   // This is common code for two searches:  One for concrete subtypes,
 831   // the other for concrete method implementations and overrides.
 832   bool doing_subtype_search() {
 833     return _name == NULL;
 834   }
 835 
 836   int num_participants() { return _num_participants; }
 837   Klass* participant(int n) {
 838     assert((uint)n <= (uint)_num_participants, "oob");
 839     return _participants[n];
 840   }
 841 
 842   // Note:  If n==num_participants, returns NULL.
 843   Method* found_method(int n) {
 844     assert((uint)n <= (uint)_num_participants, "oob");
 845     Method* fm = _found_methods[n];
 846     assert(n == _num_participants || fm != NULL, "proper usage");
 847     assert(fm == NULL || fm->method_holder() == _participants[n], "sanity");
 848     return fm;
 849   }
 850 
 851 #ifdef ASSERT
 852   // Assert that m is inherited into ctxk, without intervening overrides.
 853   // (May return true even if this is not true, in corner cases where we punt.)
 854   bool check_method_context(Klass* ctxk, Method* m) {
 855     if (m->method_holder() == ctxk)
 856       return true;  // Quick win.
 857     if (m->is_private())
 858       return false; // Quick lose.  Should not happen.
 859     if (!(m->is_public() || m->is_protected()))
 860       // The override story is complex when packages get involved.
 861       return true;  // Must punt the assertion to true.
 862     Klass* k = ctxk;
 863     Method* lm = k->lookup_method(m->name(), m->signature());
 864     if (lm == NULL && k->oop_is_instance()) {
 865       // It might be an interface method
 866         lm = ((InstanceKlass*)k)->lookup_method_in_ordered_interfaces(m->name(),
 867                                                                 m->signature());
 868     }
 869     if (lm == m)
 870       // Method m is inherited into ctxk.
 871       return true;
 872     if (lm != NULL) {
 873       if (!(lm->is_public() || lm->is_protected())) {
 874         // Method is [package-]private, so the override story is complex.
 875         return true;  // Must punt the assertion to true.
 876       }
 877       if (lm->is_static()) {
 878         // Static methods don't override non-static so punt
 879         return true;
 880       }
 881       if (   !Dependencies::is_concrete_method(lm)
 882           && !Dependencies::is_concrete_method(m)
 883           && lm->method_holder()->is_subtype_of(m->method_holder()))
 884         // Method m is overridden by lm, but both are non-concrete.
 885         return true;
 886     }
 887     ResourceMark rm;
 888     tty->print_cr("Dependency method not found in the associated context:");
 889     tty->print_cr("  context = %s", ctxk->external_name());
 890     tty->print(   "  method = "); m->print_short_name(tty); tty->cr();
 891     if (lm != NULL) {
 892       tty->print( "  found = "); lm->print_short_name(tty); tty->cr();
 893     }
 894     return false;
 895   }
 896 #endif
 897 
 898   void add_participant(Klass* participant) {
 899     assert(_num_participants + _record_witnesses < PARTICIPANT_LIMIT, "oob");
 900     int np = _num_participants++;
 901     _participants[np] = participant;
 902     _participants[np+1] = NULL;
 903     _found_methods[np+1] = NULL;
 904   }
 905 
 906   void record_witnesses(int add) {
 907     if (add > PARTICIPANT_LIMIT)  add = PARTICIPANT_LIMIT;
 908     assert(_num_participants + add < PARTICIPANT_LIMIT, "oob");
 909     _record_witnesses = add;
 910   }
 911 
 912   bool is_witness(Klass* k) {
 913     if (doing_subtype_search()) {
 914       return Dependencies::is_concrete_klass(k);
 915     } else {
 916       Method* m = InstanceKlass::cast(k)->find_method(_name, _signature);
 917       if (m == NULL || !Dependencies::is_concrete_method(m))  return false;
 918       _found_methods[_num_participants] = m;
 919       // Note:  If add_participant(k) is called,
 920       // the method m will already be memoized for it.
 921       return true;
 922     }
 923   }
 924 
 925   bool is_participant(Klass* k) {
 926     if (k == _participants[0]) {
 927       return true;
 928     } else if (_num_participants <= 1) {
 929       return false;
 930     } else {
 931       return in_list(k, &_participants[1]);
 932     }
 933   }
 934   bool ignore_witness(Klass* witness) {
 935     if (_record_witnesses == 0) {
 936       return false;
 937     } else {
 938       --_record_witnesses;
 939       add_participant(witness);
 940       return true;
 941     }
 942   }
 943   static bool in_list(Klass* x, Klass** list) {
 944     for (int i = 0; ; i++) {
 945       Klass* y = list[i];
 946       if (y == NULL)  break;
 947       if (y == x)  return true;
 948     }
 949     return false;  // not in list
 950   }
 951 
 952  private:
 953   // the actual search method:
  Klass* find_witness_anywhere(Klass* context_type,
                               bool participants_hide_witnesses,
                               bool top_level_call = true);
  // the spot-checking version:
  Klass* find_witness_in(KlassDepChange& changes,
                         Klass* context_type,
                         bool participants_hide_witnesses);
 961  public:
 962   Klass* find_witness_subtype(Klass* context_type, KlassDepChange* changes = NULL) {
 963     assert(doing_subtype_search(), "must set up a subtype search");
 964     // When looking for unexpected concrete types,
 965     // do not look beneath expected ones.
 966     const bool participants_hide_witnesses = true;
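    // (Notation: "CX > CC" means that CC is a subtype of CX; a primed name
    // such as C' denotes a newly loaded class.)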
 967     // CX > CC > C' is OK, even if C' is new.
 968     // CX > { CC,  C' } is not OK if C' is new, and C' is the witness.
 969     if (changes != NULL) {
 970       return find_witness_in(*changes, context_type, participants_hide_witnesses);
 971     } else {
 972       return find_witness_anywhere(context_type, participants_hide_witnesses);
 973     }
 974   }
 975   Klass* find_witness_definer(Klass* context_type, KlassDepChange* changes = NULL) {
 976     assert(!doing_subtype_search(), "must set up a method definer search");
 977     // When looking for unexpected concrete methods,
 978     // look beneath expected ones, to see if there are overrides.
 979     const bool participants_hide_witnesses = true;
 980     // CX.m > CC.m > C'.m is not OK, if C'.m is new, and C' is the witness.
 981     if (changes != NULL) {
 982       return find_witness_in(*changes, context_type, !participants_hide_witnesses);
 983     } else {
 984       return find_witness_anywhere(context_type, !participants_hide_witnesses);
 985     }
 986   }
 987 };
 988 
 989 #ifndef PRODUCT
 990 static int deps_find_witness_calls = 0;
 991 static int deps_find_witness_steps = 0;
 992 static int deps_find_witness_recursions = 0;
 993 static int deps_find_witness_singles = 0;
 994 static int deps_find_witness_print = 0; // set to -1 to force a final print
 995 static bool count_find_witness_calls() {
 996   if (TraceDependencies || LogCompilation) {
 997     int pcount = deps_find_witness_print + 1;
 998     bool final_stats      = (pcount == 0);
 999     bool initial_call     = (pcount == 1);
1000     bool occasional_print = ((pcount & ((1<<10) - 1)) == 0);
1001     if (pcount < 0)  pcount = 1; // crude overflow protection
1002     deps_find_witness_print = pcount;
1003     if (VerifyDependencies && initial_call) {
1004       tty->print_cr("Warning:  TraceDependencies results may be inflated by VerifyDependencies");
1005     }
1006     if (occasional_print || final_stats) {
1007       // Every now and then dump a little info about dependency searching.
1008       if (xtty != NULL) {
1009        ttyLocker ttyl;
1010        xtty->elem("deps_find_witness calls='%d' steps='%d' recursions='%d' singles='%d'",
1011                    deps_find_witness_calls,
1012                    deps_find_witness_steps,
1013                    deps_find_witness_recursions,
1014                    deps_find_witness_singles);
1015       }
1016       if (final_stats || (TraceDependencies && WizardMode)) {
1017         ttyLocker ttyl;
1018         tty->print_cr("Dependency check (find_witness) "
1019                       "calls=%d, steps=%d (avg=%.1f), recursions=%d, singles=%d",
1020                       deps_find_witness_calls,
1021                       deps_find_witness_steps,
1022                       (double)deps_find_witness_steps / deps_find_witness_calls,
1023                       deps_find_witness_recursions,
1024                       deps_find_witness_singles);
1025       }
1026     }
1027     return true;
1028   }
1029   return false;
1030 }
1031 #else
1032 #define count_find_witness_calls() (0)
1033 #endif //PRODUCT
1034 
1035 
1036 Klass* ClassHierarchyWalker::find_witness_in(KlassDepChange& changes,
1037                                                Klass* context_type,
1038                                                bool participants_hide_witnesses) {
1039   assert(changes.involves_context(context_type), "irrelevant dependency");
1040   Klass* new_type = changes.new_type();
1041 
1042   (void)count_find_witness_calls();
1043   NOT_PRODUCT(deps_find_witness_singles++);
1044 
1045   // Current thread must be in VM (not native mode, as in CI):
1046   assert(must_be_in_vm(), "raw oops here");
1047   // Must not move the class hierarchy during this check:
1048   assert_locked_or_safepoint(Compile_lock);
1049 
1050   int nof_impls = InstanceKlass::cast(context_type)->nof_implementors();
1051   if (nof_impls > 1) {
1052     // Avoid this case: *I.m > { A.m, C }; B.m > C
1053     // %%% Until this is fixed more systematically, bail out.
1054     // See corresponding comment in find_witness_anywhere.
1055     return context_type;
1056   }
1057 
1058   assert(!is_participant(new_type), "only old classes are participants");
1059   if (participants_hide_witnesses) {
1060     // If the new type is a subtype of a participant, we are done.
1061     for (int i = 0; i < num_participants(); i++) {
1062       Klass* part = participant(i);
1063       if (part == NULL)  continue;
1064       assert(changes.involves_context(part) == new_type->is_subtype_of(part),
1065              "correct marking of participants, b/c new_type is unique");
1066       if (changes.involves_context(part)) {
1067         // new guy is protected from this check by previous participant
1068         return NULL;
1069       }
1070     }
1071   }
1072 
1073   if (is_witness(new_type) &&
1074       !ignore_witness(new_type)) {
1075     return new_type;
1076   }
1077 
1078   return NULL;
1079 }
1080 
1081 
1082 // Walk hierarchy under a context type, looking for unexpected types.
1083 // Do not report participant types, and recursively walk beneath
1084 // them only if participants_hide_witnesses is false.
1085 // If top_level_call is false, skip testing the context type,
1086 // because the caller has already considered it.
1087 Klass* ClassHierarchyWalker::find_witness_anywhere(Klass* context_type,
1088                                                      bool participants_hide_witnesses,
1089                                                      bool top_level_call) {
1090   // Current thread must be in VM (not native mode, as in CI):
1091   assert(must_be_in_vm(), "raw oops here");
1092   // Must not move the class hierarchy during this check:
1093   assert_locked_or_safepoint(Compile_lock);
1094 
1095   bool do_counts = count_find_witness_calls();
1096 
1097   // Check the root of the sub-hierarchy first.
1098   if (top_level_call) {
1099     if (do_counts) {
1100       NOT_PRODUCT(deps_find_witness_calls++);
1101       NOT_PRODUCT(deps_find_witness_steps++);
1102     }
1103     if (is_participant(context_type)) {
1104       if (participants_hide_witnesses)  return NULL;
1105       // else fall through to search loop...
1106     } else if (is_witness(context_type) && !ignore_witness(context_type)) {
1107       // The context is an abstract class or interface, to start with.
1108       return context_type;
1109     }
1110   }
1111 
1112   // Now we must check each implementor and each subclass.
1113   // Use a short worklist to avoid blowing the stack.
1114   // Each worklist entry is a *chain* of subklass siblings to process.
1115   const int CHAINMAX = 100;  // >= 1 + InstanceKlass::implementors_limit
1116   Klass* chains[CHAINMAX];
1117   int    chaini = 0;  // index into worklist
1118   Klass* chain;       // scratch variable
1119 #define ADD_SUBCLASS_CHAIN(k)                     {  \
1120     assert(chaini < CHAINMAX, "oob");                \
1121     chain = InstanceKlass::cast(k)->subklass();      \
1122     if (chain != NULL)  chains[chaini++] = chain;    }
1123 
1124   // Look for non-abstract subclasses.
1125   // (Note:  Interfaces do not have subclasses.)
1126   ADD_SUBCLASS_CHAIN(context_type);
1127 
1128   // If it is an interface, search its direct implementors.
1129   // (Their subclasses are additional indirect implementors.
1130   // See InstanceKlass::add_implementor.)
1131   // (Note:  nof_implementors is always zero for non-interfaces.)
1132   int nof_impls = InstanceKlass::cast(context_type)->nof_implementors();
1133   if (nof_impls > 1) {
1134     // Avoid this case: *I.m > { A.m, C }; B.m > C
1135     // Here, I.m has 2 concrete implementations, but m appears unique
1136     // as A.m, because the search misses B.m when checking C.
1137     // The inherited method B.m was getting missed by the walker
1138     // when interface 'I' was the starting point.
1139     // %%% Until this is fixed more systematically, bail out.
1140     // (Old CHA had the same limitation.)
1141     return context_type;
1142   }
1143   if (nof_impls > 0) {
1144     Klass* impl = InstanceKlass::cast(context_type)->implementor();
1145     assert(impl != NULL, "just checking");
    // If impl is the same as the context_type, then more than one
    // implementor has been seen.  No exact info in this case.
1148     if (impl == context_type) {
1149       return context_type;  // report an inexact witness to this sad affair
1150     }
1151     if (do_counts)
1152       { NOT_PRODUCT(deps_find_witness_steps++); }
1153     if (is_participant(impl)) {
1154       if (!participants_hide_witnesses) {
1155         ADD_SUBCLASS_CHAIN(impl);
1156       }
1157     } else if (is_witness(impl) && !ignore_witness(impl)) {
1158       return impl;
1159     } else {
1160       ADD_SUBCLASS_CHAIN(impl);
1161     }
1162   }
1163 
1164   // Recursively process each non-trivial sibling chain.
1165   while (chaini > 0) {
1166     Klass* chain = chains[--chaini];
1167     for (Klass* sub = chain; sub != NULL; sub = sub->next_sibling()) {
1168       if (do_counts) { NOT_PRODUCT(deps_find_witness_steps++); }
1169       if (is_participant(sub)) {
1170         if (participants_hide_witnesses)  continue;
1171         // else fall through to process this guy's subclasses
1172       } else if (is_witness(sub) && !ignore_witness(sub)) {
1173         return sub;
1174       }
1175       if (chaini < (VerifyDependencies? 2: CHAINMAX)) {
1176         // Fast path.  (Partially disabled if VerifyDependencies.)
1177         ADD_SUBCLASS_CHAIN(sub);
1178       } else {
1179         // Worklist overflow.  Do a recursive call.  Should be rare.
1180         // The recursive call will have its own worklist, of course.
1181         // (Note that sub has already been tested, so that there is
1182         // no need for the recursive call to re-test.  That's handy,
1183         // since the recursive call sees sub as the context_type.)
1184         if (do_counts) { NOT_PRODUCT(deps_find_witness_recursions++); }
1185         Klass* witness = find_witness_anywhere(sub,
1186                                                  participants_hide_witnesses,
1187                                                  /*top_level_call=*/ false);
1188         if (witness != NULL)  return witness;
1189       }
1190     }
1191   }
1192 
1193   // No witness found.  The dependency remains unbroken.
1194   return NULL;
1195 #undef ADD_SUBCLASS_CHAIN
1196 }
1197 
1198 
1199 bool Dependencies::is_concrete_klass(Klass* k) {
1200   if (k->is_abstract())  return false;
1201   // %%% We could treat classes which are concrete but
1202   // have not yet been instantiated as virtually abstract.
1203   // This would require a deoptimization barrier on first instantiation.
1204   //if (k->is_not_instantiated())  return false;
1205   return true;
1206 }
1207 
1208 bool Dependencies::is_concrete_method(Method* m) {
1209   // Statics are irrelevant to virtual call sites.
1210   if (m->is_static())  return false;
1211 
1212   // We could also return false if m does not yet appear to be
1213   // executed, if the VM version supports this distinction also.
1214   // Default methods are considered "concrete" as well.
1215   return !m->is_abstract() &&
1216          !m->is_overpass(); // error functions aren't concrete
1217 }
1218 
1219 
1220 Klass* Dependencies::find_finalizable_subclass(Klass* k) {
1221   if (k->is_interface())  return NULL;
1222   if (k->has_finalizer()) return k;
1223   k = k->subklass();
1224   while (k != NULL) {
1225     Klass* result = find_finalizable_subclass(k);
1226     if (result != NULL) return result;
1227     k = k->next_sibling();
1228   }
1229   return NULL;
1230 }
1231 
1232 
1233 bool Dependencies::is_concrete_klass(ciInstanceKlass* k) {
1234   if (k->is_abstract())  return false;
1235   // We could also return false if k does not yet appear to be
1236   // instantiated, if the VM version supports this distinction also.
1237   //if (k->is_not_instantiated())  return false;
1238   return true;
1239 }
1240 
1241 bool Dependencies::is_concrete_method(ciMethod* m) {
1242   // Statics are irrelevant to virtual call sites.
1243   if (m->is_static())  return false;
1244 
1245   // We could also return false if m does not yet appear to be
1246   // executed, if the VM version supports this distinction also.
1247   return !m->is_abstract();
1248 }
1249 
1250 
1251 bool Dependencies::has_finalizable_subclass(ciInstanceKlass* k) {
1252   return k->has_finalizable_subclass();
1253 }
1254 
1255 
1256 // Any use of the contents (bytecodes) of a method must be
1257 // marked by an "evol_method" dependency, if those contents
1258 // can change.  (Note: A method is always dependent on itself.)
1259 Klass* Dependencies::check_evol_method(Method* m) {
1260   assert(must_be_in_vm(), "raw oops here");
1261   // Did somebody do a JVMTI RedefineClasses while our backs were turned?
  // Or is there now a breakpoint?
1263   // (Assumes compiled code cannot handle bkpts; change if UseFastBreakpoints.)
1264   if (m->is_old()
1265       || m->number_of_breakpoints() > 0) {
1266     return m->method_holder();
1267   } else {
1268     return NULL;
1269   }
1270 }
1271 
1272 // This is a strong assertion:  It is that the given type
1273 // has no subtypes whatever.  It is most useful for
1274 // optimizing checks on reflected types or on array types.
1275 // (Checks on types which are derived from real instances
1276 // can be optimized more strongly than this, because we
1277 // know that the checked type comes from a concrete type,
1278 // and therefore we can disregard abstract types.)
1279 Klass* Dependencies::check_leaf_type(Klass* ctxk) {
1280   assert(must_be_in_vm(), "raw oops here");
1281   assert_locked_or_safepoint(Compile_lock);
1282   InstanceKlass* ctx = InstanceKlass::cast(ctxk);
1283   Klass* sub = ctx->subklass();
1284   if (sub != NULL) {
1285     return sub;
1286   } else if (ctx->nof_implementors() != 0) {
1287     // if it is an interface, it must be unimplemented
1288     // (if it is not an interface, nof_implementors is always zero)
1289     Klass* impl = ctx->implementor();
1290     assert(impl != NULL, "must be set");
1291     return impl;
1292   } else {
1293     return NULL;
1294   }
1295 }
1296 
1297 // Test the assertion that conck is the only concrete subtype* of ctxk.
// The type conck itself is allowed to have further concrete subtypes.
1299 // This allows the compiler to narrow occurrences of ctxk by conck,
1300 // when dealing with the types of actual instances.
1301 Klass* Dependencies::check_abstract_with_unique_concrete_subtype(Klass* ctxk,
1302                                                                    Klass* conck,
1303                                                                    KlassDepChange* changes) {
1304   ClassHierarchyWalker wf(conck);
1305   return wf.find_witness_subtype(ctxk, changes);
1306 }
1307 
1308 // If a non-concrete class has no concrete subtypes, it is not (yet)
1309 // instantiatable.  This can allow the compiler to make some paths go
1310 // dead, if they are gated by a test of the type.
1311 Klass* Dependencies::check_abstract_with_no_concrete_subtype(Klass* ctxk,
1312                                                                KlassDepChange* changes) {
1313   // Find any concrete subtype, with no participants:
1314   ClassHierarchyWalker wf;
1315   return wf.find_witness_subtype(ctxk, changes);
1316 }
1317 
1318 
1319 // If a concrete class has no concrete subtypes, it can always be
1320 // exactly typed.  This allows the use of a cheaper type test.
1321 Klass* Dependencies::check_concrete_with_no_concrete_subtype(Klass* ctxk,
1322                                                                KlassDepChange* changes) {
1323   // Find any concrete subtype, with only the ctxk as participant:
1324   ClassHierarchyWalker wf(ctxk);
1325   return wf.find_witness_subtype(ctxk, changes);
1326 }
1327 
1328 
1329 // Find the unique concrete proper subtype of ctxk, or NULL if there
1330 // is more than one concrete proper subtype.  If there are no concrete
1331 // proper subtypes, return ctxk itself, whether it is concrete or not.
// The returned subtype is allowed to have further concrete subtypes.
1333 // That is, return CC1 for CX > CC1 > CC2, but NULL for CX > { CC1, CC2 }.
1334 Klass* Dependencies::find_unique_concrete_subtype(Klass* ctxk) {
1335   ClassHierarchyWalker wf(ctxk);   // Ignore ctxk when walking.
1336   wf.record_witnesses(1);          // Record one other witness when walking.
1337   Klass* wit = wf.find_witness_subtype(ctxk);
1338   if (wit != NULL)  return NULL;   // Too many witnesses.
1339   Klass* conck = wf.participant(0);
1340   if (conck == NULL) {
1341 #ifndef PRODUCT
1342     // Make sure the dependency mechanism will pass this discovery:
1343     if (VerifyDependencies) {
1344       // Turn off dependency tracing while actually testing deps.
1345       FlagSetting fs(TraceDependencies, false);
1346       if (!Dependencies::is_concrete_klass(ctxk)) {
1347         guarantee(NULL ==
1348                   (void *)check_abstract_with_no_concrete_subtype(ctxk),
1349                   "verify dep.");
1350       } else {
1351         guarantee(NULL ==
1352                   (void *)check_concrete_with_no_concrete_subtype(ctxk),
1353                   "verify dep.");
1354       }
1355     }
1356 #endif //PRODUCT
1357     return ctxk;                   // Return ctxk as a flag for "no subtypes".
1358   } else {
1359 #ifndef PRODUCT
1360     // Make sure the dependency mechanism will pass this discovery:
1361     if (VerifyDependencies) {
1362       // Turn off dependency tracing while actually testing deps.
1363       FlagSetting fs(TraceDependencies, false);
1364       if (!Dependencies::is_concrete_klass(ctxk)) {
1365         guarantee(NULL == (void *)
1366                   check_abstract_with_unique_concrete_subtype(ctxk, conck),
1367                   "verify dep.");
1368       }
1369     }
1370 #endif //PRODUCT
1371     return conck;
1372   }
1373 }
1374 
1375 // Test the assertion that the k[12] are the only concrete subtypes of ctxk,
1376 // except possibly for further subtypes of k[12] themselves.
1377 // The context type must be abstract.  The types k1 and k2 are themselves
1378 // allowed to have further concrete subtypes.
1379 Klass* Dependencies::check_abstract_with_exclusive_concrete_subtypes(
1380                                                 Klass* ctxk,
1381                                                 Klass* k1,
1382                                                 Klass* k2,
1383                                                 KlassDepChange* changes) {
1384   ClassHierarchyWalker wf;
1385   wf.add_participant(k1);
1386   wf.add_participant(k2);
1387   return wf.find_witness_subtype(ctxk, changes);
1388 }
1389 
1390 // Search ctxk for concrete implementations.  If there are klen or fewer,
1391 // pack them into the given array and return the number.
1392 // Otherwise, return -1, meaning the given array would overflow.
1393 // (Note that a return of 0 means there are exactly no concrete subtypes.)
1394 // In this search, if ctxk is concrete, it will be reported alone.
1395 // For any type CC reported, no proper subtypes of CC will be reported.
1396 int Dependencies::find_exclusive_concrete_subtypes(Klass* ctxk,
1397                                                    int klen,
1398                                                    Klass* karray[]) {
1399   ClassHierarchyWalker wf;
1400   wf.record_witnesses(klen);
1401   Klass* wit = wf.find_witness_subtype(ctxk);
1402   if (wit != NULL)  return -1;  // Too many witnesses.
1403   int num = wf.num_participants();
1404   assert(num <= klen, "oob");
1405   // Pack the result array with the good news.
1406   for (int i = 0; i < num; i++)
1407     karray[i] = wf.participant(i);
1408 #ifndef PRODUCT
1409   // Make sure the dependency mechanism will pass this discovery:
1410   if (VerifyDependencies) {
1411     // Turn off dependency tracing while actually testing deps.
1412     FlagSetting fs(TraceDependencies, false);
1413     switch (Dependencies::is_concrete_klass(ctxk)? -1: num) {
1414     case -1: // ctxk was itself concrete
1415       guarantee(num == 1 && karray[0] == ctxk, "verify dep.");
1416       break;
1417     case 0:
1418       guarantee(NULL == (void *)check_abstract_with_no_concrete_subtype(ctxk),
1419                 "verify dep.");
1420       break;
1421     case 1:
1422       guarantee(NULL == (void *)
1423                 check_abstract_with_unique_concrete_subtype(ctxk, karray[0]),
1424                 "verify dep.");
1425       break;
1426     case 2:
1427       guarantee(NULL == (void *)
1428                 check_abstract_with_exclusive_concrete_subtypes(ctxk,
1429                                                                 karray[0],
1430                                                                 karray[1]),
1431                 "verify dep.");
1432       break;
1433     default:
1434       ShouldNotReachHere();  // klen > 2 not yet supported
1435     }
1436   }
1437 #endif //PRODUCT
1438   return num;
1439 }
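
// Illustrative usage sketch (hypothetical caller, not part of this file):
//
//   Klass* karray[2];
//   int n = Dependencies::find_exclusive_concrete_subtypes(ctxk, 2, karray);
//   if (n < 0) {
//     // more than two concrete subtypes exist; nothing can be asserted
//   } else if (n == 0) {
//     // ctxk has no concrete subtypes at all
//   } else {
//     // karray[0..n-1] are the only concrete subtypes of ctxk
//     // (n == 1 with karray[0] == ctxk if ctxk itself is concrete)
//   }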
1440 
1441 // If a class (or interface) has a unique concrete method uniqm, return NULL.
1442 // Otherwise, return a class that contains an interfering method.
1443 Klass* Dependencies::check_unique_concrete_method(Klass* ctxk, Method* uniqm,
1444                                                     KlassDepChange* changes) {
1445   // Here is a missing optimization:  If uniqm->is_final(),
1446   // we don't really need to search beneath it for overrides.
1447   // This is probably not important, since we don't use dependencies
1448   // to track final methods.  (They can't be "definalized".)
1449   ClassHierarchyWalker wf(uniqm->method_holder(), uniqm);
1450   return wf.find_witness_definer(ctxk, changes);
1451 }
1452 
1453 // Find the set of all non-abstract methods under ctxk that match m.
1454 // (The method m must be defined or inherited in ctxk.)
1455 // Include m itself in the set, unless it is abstract.
1456 // If this set has exactly one element, return that element.
1457 Method* Dependencies::find_unique_concrete_method(Klass* ctxk, Method* m) {
1458   // Return NULL if m is marked old; must have been a redefined method.
1459   if (m->is_old()) {
1460     return NULL;
1461   }
1462   ClassHierarchyWalker wf(m);
1463   assert(wf.check_method_context(ctxk, m), "proper context");
1464   wf.record_witnesses(1);
1465   Klass* wit = wf.find_witness_definer(ctxk);
1466   if (wit != NULL)  return NULL;  // Too many witnesses.
1467   Method* fm = wf.found_method(0);  // Will be NULL if num_parts == 0.
1468   if (Dependencies::is_concrete_method(m)) {
1469     if (fm == NULL) {
1470       // It turns out that m was always the only implementation.
1471       fm = m;
1472     } else if (fm != m) {
1473       // Two conflicting implementations after all.
1474       // (This can happen if m is inherited into ctxk and fm overrides it.)
1475       return NULL;
1476     }
1477   }
1478 #ifndef PRODUCT
1479   // Make sure the dependency mechanism will pass this discovery:
1480   if (VerifyDependencies && fm != NULL) {
1481     guarantee(NULL == (void *)check_unique_concrete_method(ctxk, fm),
1482               "verify dep.");
1483   }
1484 #endif //PRODUCT
1485   return fm;
1486 }
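
// Illustrative example (editor's sketch, hypothetical Java types): if an
// abstract class or interface A declares m() and, among the loaded classes,
// only the concrete class B implements it, then the search above returns
// B's m().  A compiler can then devirtualize calls to A's m(), protected by a
// unique_concrete_method dependency; loading a second concrete implementor
// later yields a witness in check_unique_concrete_method() and the dependent
// nmethod is invalidated.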
1487 
1488 Klass* Dependencies::check_exclusive_concrete_methods(Klass* ctxk,
1489                                                         Method* m1,
1490                                                         Method* m2,
1491                                                         KlassDepChange* changes) {
1492   ClassHierarchyWalker wf(m1);
1493   wf.add_participant(m1->method_holder());
1494   wf.add_participant(m2->method_holder());
1495   return wf.find_witness_definer(ctxk, changes);
1496 }
1497 
1498 // Find the set of all non-abstract methods under ctxk that match m[0].
1499 // (The method m[0] must be defined or inherited in ctxk.)
1500 // Include m[0] itself in the set, unless it is abstract.
1501 // Fill the given array m[0..(mlen-1)] with this set, and return the length.
1502 // (The length may be zero if no concrete methods are found anywhere.)
1503 // If there are too many concrete methods to fit in marray, return -1.
1504 int Dependencies::find_exclusive_concrete_methods(Klass* ctxk,
1505                                                   int mlen,
1506                                                   Method* marray[]) {
1507   Method* m0 = marray[0];
1508   ClassHierarchyWalker wf(m0);
1509   assert(wf.check_method_context(ctxk, m0), "proper context");
1510   wf.record_witnesses(mlen);
1511   bool participants_hide_witnesses = true;
1512   Klass* wit = wf.find_witness_definer(ctxk);
1513   if (wit != NULL)  return -1;  // Too many witnesses.
1514   int num = wf.num_participants();
1515   assert(num <= mlen, "oob");
1516   // Keep track of whether m0 is also part of the result set.
1517   int mfill = 0;
1518   assert(marray[mfill] == m0, "sanity");
1519   if (Dependencies::is_concrete_method(m0))
1520     mfill++;  // keep m0 as marray[0], the first result
1521   for (int i = 0; i < num; i++) {
1522     Method* fm = wf.found_method(i);
1523     if (fm == m0)  continue;  // m0 is already in the list (as marray[0]).
1524     if (mfill == mlen) {
1525       return -1;              // Oops.  Too many methods after all!
1526     }
1527     marray[mfill++] = fm;
1528   }
1529 #ifndef PRODUCT
1530   // Make sure the dependency mechanism will pass this discovery:
1531   if (VerifyDependencies) {
1532     // Turn off dependency tracing while actually testing deps.
1533     FlagSetting fs(TraceDependencies, false);
1534     switch (mfill) {
1535     case 1:
1536       guarantee(NULL == (void *)check_unique_concrete_method(ctxk, marray[0]),
1537                 "verify dep.");
1538       break;
1539     case 2:
1540       guarantee(NULL == (void *)
1541                 check_exclusive_concrete_methods(ctxk, marray[0], marray[1]),
1542                 "verify dep.");
1543       break;
1544     default:
1545       ShouldNotReachHere();  // mlen > 2 not yet supported
1546     }
1547   }
1548 #endif //PRODUCT
1549   return mfill;
1550 }
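
// Illustrative usage sketch (hypothetical caller): marray[0] must hold the
// reference method m0 on entry; on success the array holds the exclusive set.
//
//   Method* marray[2] = { m0, NULL };
//   int n = Dependencies::find_exclusive_concrete_methods(ctxk, 2, marray);
//   if (n < 0) {
//     // more than two concrete implementations; nothing can be asserted
//   } else if (n == 2) {
//     // marray[0] and marray[1] are the only concrete implementations of
//     // m0 under ctxk (e.g., usable for a bimorphic call site)
//   }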
1551 
1552 
1553 Klass* Dependencies::check_has_no_finalizable_subclasses(Klass* ctxk, KlassDepChange* changes) {
1554   Klass* search_at = ctxk;
1555   if (changes != NULL)
1556     search_at = changes->new_type(); // just look at the new bit
1557   return find_finalizable_subclass(search_at);
1558 }
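
// Illustrative note (editor's sketch): a witness here is any subclass of the
// search root that requires finalization.  When checking is driven by a class
// load ('changes' non-NULL), only the newly loaded type can introduce such a
// subclass, which is why the search above is narrowed to changes->new_type().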
1559 
1560 
1561 Klass* Dependencies::check_call_site_target_value(oop call_site, oop method_handle, CallSiteDepChange* changes) {
1562   assert(call_site    ->is_a(SystemDictionary::CallSite_klass()),     "sanity");
1563   assert(method_handle->is_a(SystemDictionary::MethodHandle_klass()), "sanity");
1564   if (changes == NULL) {
1565     // Validate all CallSites
1566     if (java_lang_invoke_CallSite::target(call_site) != method_handle)
1567       return call_site->klass();  // assertion failed
1568   } else {
1569     // Validate the given CallSite
1570     if (call_site == changes->call_site() && java_lang_invoke_CallSite::target(call_site) != changes->method_handle()) {
1571       assert(method_handle != changes->method_handle(), "must be");
1572       return call_site->klass();  // assertion failed
1573     }
1574   }
1575   return NULL;  // assertion still valid
1576 }
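
// Illustrative sketch (hypothetical Java usage): compiled code may be
// specialized to the current target of a MutableCallSite.  If user code later
// calls callSite.setTarget(newTarget), the recorded method_handle no longer
// equals java_lang_invoke_CallSite::target(call_site); the check above then
// reports the CallSite's klass as the witness and the dependent nmethod is
// invalidated.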
1577 
1578 
1579 void Dependencies::DepStream::trace_and_log_witness(Klass* witness) {
1580   if (witness != NULL) {
1581     if (TraceDependencies) {
1582       print_dependency(witness, /*verbose=*/ true);
1583     }
1584     // The following is a no-op unless logging is enabled:
1585     log_dependency(witness);
1586   }
1587 }
1588 
1589 
1590 Klass* Dependencies::DepStream::check_klass_dependency(KlassDepChange* changes) {
1591   assert_locked_or_safepoint(Compile_lock);
1592   Dependencies::check_valid_dependency_type(type());
1593 
1594   Klass* witness = NULL;
1595   switch (type()) {
1596   case evol_method:
1597     witness = check_evol_method(method_argument(0));
1598     break;
1599   case leaf_type:
1600     witness = check_leaf_type(context_type());
1601     break;
1602   case abstract_with_unique_concrete_subtype:
1603     witness = check_abstract_with_unique_concrete_subtype(context_type(), type_argument(1), changes);
1604     break;
1605   case abstract_with_no_concrete_subtype:
1606     witness = check_abstract_with_no_concrete_subtype(context_type(), changes);
1607     break;
1608   case concrete_with_no_concrete_subtype:
1609     witness = check_concrete_with_no_concrete_subtype(context_type(), changes);
1610     break;
1611   case unique_concrete_method:
1612     witness = check_unique_concrete_method(context_type(), method_argument(1), changes);
1613     break;
1614   case abstract_with_exclusive_concrete_subtypes_2:
1615     witness = check_abstract_with_exclusive_concrete_subtypes(context_type(), type_argument(1), type_argument(2), changes);
1616     break;
1617   case exclusive_concrete_methods_2:
1618     witness = check_exclusive_concrete_methods(context_type(), method_argument(1), method_argument(2), changes);
1619     break;
1620   case no_finalizable_subclasses:
1621     witness = check_has_no_finalizable_subclasses(context_type(), changes);
1622     break;
1623   default:
1624     witness = NULL;
1625     break;
1626   }
1627   trace_and_log_witness(witness);
1628   return witness;
1629 }
1630 
1631 
1632 Klass* Dependencies::DepStream::check_call_site_dependency(CallSiteDepChange* changes) {
1633   assert_locked_or_safepoint(Compile_lock);
1634   Dependencies::check_valid_dependency_type(type());
1635 
1636   Klass* witness = NULL;
1637   switch (type()) {
1638   case call_site_target_value:
1639     witness = check_call_site_target_value(argument_oop(0), argument_oop(1), changes);
1640     break;
1641   default:
1642     witness = NULL;
1643     break;
1644   }
1645   trace_and_log_witness(witness);
1646   return witness;
1647 }
1648 
1649 
1650 Klass* Dependencies::DepStream::spot_check_dependency_at(DepChange& changes) {
1651   // Handle klass dependency
1652   if (changes.is_klass_change() && changes.as_klass_change()->involves_context(context_type()))
1653     return check_klass_dependency(changes.as_klass_change());
1654 
1655   // Handle CallSite dependency
1656   if (changes.is_call_site_change())
1657     return check_call_site_dependency(changes.as_call_site_change());
1658 
1659   // irrelevant dependency; skip it
1660   return NULL;
1661 }
1662 
1663 
1664 void DepChange::print() {
1665   int nsup = 0, nint = 0;
1666   for (ContextStream str(*this); str.next(); ) {
1667     Klass* k = str.klass();
1668     switch (str.change_type()) {
1669     case Change_new_type:
1670       tty->print_cr("  dependee = %s", InstanceKlass::cast(k)->external_name());
1671       break;
1672     case Change_new_sub:
1673       if (!WizardMode) {
1674         ++nsup;
1675       } else {
1676         tty->print_cr("  context super = %s", InstanceKlass::cast(k)->external_name());
1677       }
1678       break;
1679     case Change_new_impl:
1680       if (!WizardMode) {
1681         ++nint;
1682       } else {
1683         tty->print_cr("  context interface = %s", InstanceKlass::cast(k)->external_name());
1684       }
1685       break;
1686     }
1687   }
1688   if (nsup + nint != 0) {
1689     tty->print_cr("  context supers = %d, interfaces = %d", nsup, nint);
1690   }
1691 }
1692 
1693 void DepChange::ContextStream::start() {
1694   Klass* new_type = _changes.is_klass_change() ? _changes.as_klass_change()->new_type() : (Klass*) NULL;
1695   _change_type = (new_type == NULL ? NO_CHANGE : Start_Klass);
1696   _klass = new_type;
1697   _ti_base = NULL;
1698   _ti_index = 0;
1699   _ti_limit = 0;
1700 }
1701 
1702 bool DepChange::ContextStream::next() {
1703   switch (_change_type) {
1704   case Start_Klass:             // initial state; _klass is the new type
1705     _ti_base = InstanceKlass::cast(_klass)->transitive_interfaces();
1706     _ti_index = 0;
1707     _change_type = Change_new_type;
1708     return true;
1709   case Change_new_type:
1710     // fall through:
1711     _change_type = Change_new_sub;
1712   case Change_new_sub:
1713     // 6598190: brackets workaround Sun Studio C++ compiler bug 6629277
1714     {
1715       _klass = InstanceKlass::cast(_klass)->super();
1716       if (_klass != NULL) {
1717         return true;
1718       }
1719     }
1720     // else set up _ti_limit and fall through:
1721     _ti_limit = (_ti_base == NULL) ? 0 : _ti_base->length();
1722     _change_type = Change_new_impl;
1723   case Change_new_impl:
1724     if (_ti_index < _ti_limit) {
1725       _klass = _ti_base->at(_ti_index++);
1726       return true;
1727     }
1728     // fall through:
1729     _change_type = NO_CHANGE;  // iterator is exhausted
1730   case NO_CHANGE:
1731     break;
1732   default:
1733     ShouldNotReachHere();
1734   }
1735   return false;
1736 }
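
// Illustrative iteration order (editor's note, hypothetical types): for a
// newly loaded class C extends B (with B extends A) implementing interface I,
// the stream yields C (Change_new_type); then B, A, and java.lang.Object
// (Change_new_sub); then C's transitive interfaces such as I (Change_new_impl);
// after which next() returns false.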
1737 
1738 void KlassDepChange::initialize() {
1739   // entire transaction must be under this lock:
1740   assert_lock_strong(Compile_lock);
1741 
1742   // Mark the dependee and all its superclasses,
1743   // as well as its transitive interfaces.
1744   for (ContextStream str(*this); str.next(); ) {
1745     Klass* d = str.klass();
1746     assert(!InstanceKlass::cast(d)->is_marked_dependent(), "checking");
1747     InstanceKlass::cast(d)->set_is_marked_dependent(true);
1748   }
1749 }
1750 
1751 KlassDepChange::~KlassDepChange() {
1752   // Unmark the dependee and all its superclasses,
1753   // as well as its transitive interfaces.
1754   for (ContextStream str(*this); str.next(); ) {
1755     Klass* d = str.klass();
1756     InstanceKlass::cast(d)->set_is_marked_dependent(false);
1757   }
1758 }
1759 
1760 bool KlassDepChange::involves_context(Klass* k) {
1761   if (k == NULL || !k->oop_is_instance()) {
1762     return false;
1763   }
1764   InstanceKlass* ik = InstanceKlass::cast(k);
1765   bool is_contained = ik->is_marked_dependent();
1766   assert(is_contained == new_type()->is_subtype_of(k),
1767          "correct marking of potential context types");
1768   return is_contained;
1769 }
1770 
1771 #ifndef PRODUCT
1772 void Dependencies::print_statistics() {
1773   if (deps_find_witness_print != 0) {
1774     // Call one final time, to flush out the data.
1775     deps_find_witness_print = -1;
1776     count_find_witness_calls();
1777   }
1778 }
1779 #endif