1 /*
   2  * Copyright (c) 2005, 2013, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "ci/ciArrayKlass.hpp"
  27 #include "ci/ciEnv.hpp"
  28 #include "ci/ciKlass.hpp"
  29 #include "ci/ciMethod.hpp"
  30 #include "code/dependencies.hpp"
  31 #include "compiler/compileLog.hpp"
  32 #include "oops/oop.inline.hpp"
  33 #include "runtime/handles.hpp"
  34 #include "runtime/handles.inline.hpp"
  35 #include "utilities/copy.hpp"
  36 
  37 
  38 #ifdef ASSERT
  39 static bool must_be_in_vm() {
  40   Thread* thread = Thread::current();
  41   if (thread->is_Java_thread())
  42     return ((JavaThread*)thread)->thread_state() == _thread_in_vm;
  43   else
    return true;  // something like this: thread->is_VM_thread();
  45 }
  46 #endif //ASSERT
  47 
  48 void Dependencies::initialize(ciEnv* env) {
  49   Arena* arena = env->arena();
  50   _oop_recorder = env->oop_recorder();
  51   _log = env->log();
  52   _dep_seen = new(arena) GrowableArray<int>(arena, 500, 0, 0);
  53   DEBUG_ONLY(_deps[end_marker] = NULL);
  54   for (int i = (int)FIRST_TYPE; i < (int)TYPE_LIMIT; i++) {
  55     _deps[i] = new(arena) GrowableArray<ciBaseObject*>(arena, 10, 0, 0);
  56   }
  57   _content_bytes = NULL;
  58   _size_in_bytes = (size_t)-1;
  59 
  60   assert(TYPE_LIMIT <= (1<<LG2_TYPE_LIMIT), "sanity");
  61 }
  62 
  63 void Dependencies::assert_evol_method(ciMethod* m) {
  64   assert_common_1(evol_method, m);
  65 }
  66 
  67 void Dependencies::assert_leaf_type(ciKlass* ctxk) {
  68   if (ctxk->is_array_klass()) {
  69     // As a special case, support this assertion on an array type,
  70     // which reduces to an assertion on its element type.
  71     // Note that this cannot be done with assertions that
  72     // relate to concreteness or abstractness.
  73     ciType* elemt = ctxk->as_array_klass()->base_element_type();
  74     if (!elemt->is_instance_klass())  return;   // Ex:  int[][]
  75     ctxk = elemt->as_instance_klass();
  76     //if (ctxk->is_final())  return;            // Ex:  String[][]
  77   }
  78   check_ctxk(ctxk);
  79   assert_common_1(leaf_type, ctxk);
  80 }
  81 
  82 void Dependencies::assert_abstract_with_unique_concrete_subtype(ciKlass* ctxk, ciKlass* conck) {
  83   check_ctxk_abstract(ctxk);
  84   assert_common_2(abstract_with_unique_concrete_subtype, ctxk, conck);
  85 }
  86 
  87 void Dependencies::assert_abstract_with_no_concrete_subtype(ciKlass* ctxk) {
  88   check_ctxk_abstract(ctxk);
  89   assert_common_1(abstract_with_no_concrete_subtype, ctxk);
  90 }
  91 
  92 void Dependencies::assert_concrete_with_no_concrete_subtype(ciKlass* ctxk) {
  93   check_ctxk_concrete(ctxk);
  94   assert_common_1(concrete_with_no_concrete_subtype, ctxk);
  95 }
  96 
  97 void Dependencies::assert_unique_concrete_method(ciKlass* ctxk, ciMethod* uniqm) {
  98   check_ctxk(ctxk);
  99   assert_common_2(unique_concrete_method, ctxk, uniqm);
 100 }
 101 
 102 void Dependencies::assert_abstract_with_exclusive_concrete_subtypes(ciKlass* ctxk, ciKlass* k1, ciKlass* k2) {
 103   check_ctxk(ctxk);
 104   assert_common_3(abstract_with_exclusive_concrete_subtypes_2, ctxk, k1, k2);
 105 }
 106 
 107 void Dependencies::assert_exclusive_concrete_methods(ciKlass* ctxk, ciMethod* m1, ciMethod* m2) {
 108   check_ctxk(ctxk);
 109   assert_common_3(exclusive_concrete_methods_2, ctxk, m1, m2);
 110 }
 111 
 112 void Dependencies::assert_has_no_finalizable_subclasses(ciKlass* ctxk) {
 113   check_ctxk(ctxk);
 114   assert_common_1(no_finalizable_subclasses, ctxk);
 115 }
 116 
 117 void Dependencies::assert_call_site_target_value(ciCallSite* call_site, ciMethodHandle* method_handle) {
 118   check_ctxk(call_site->klass());
 119   assert_common_2(call_site_target_value, call_site, method_handle);
 120 }
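
// Illustrative usage sketch (not part of this file; the variable names are
// hypothetical):  a compiler records the assumptions behind an optimization
// through the assert_* calls above, e.g. after devirtualizing a call through
// an abstract receiver type:
//
//   Dependencies* deps = ...;   // owned by the current compilation
//   deps->assert_abstract_with_unique_concrete_subtype(abstract_ctxk, conck);
//   deps->assert_unique_concrete_method(ctxk, target_method);
//
// The assertions are only buffered here; they are encoded into the nmethod
// later by encode_content_bytes() and copy_to(), below.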
 121 
// Helper function.  If we are adding a new dep. under ctxk2,
// try to find an old dep. under a broader ctxk1.  If there is one,
// the old dep. already covers the new one; if instead the new context
// subsumes the old one, widen the recorded context in place.
 125 bool Dependencies::maybe_merge_ctxk(GrowableArray<ciBaseObject*>* deps,
 126                                     int ctxk_i, ciKlass* ctxk2) {
 127   ciKlass* ctxk1 = deps->at(ctxk_i)->as_metadata()->as_klass();
 128   if (ctxk2->is_subtype_of(ctxk1)) {
 129     return true;  // success, and no need to change
 130   } else if (ctxk1->is_subtype_of(ctxk2)) {
 131     // new context class fully subsumes previous one
 132     deps->at_put(ctxk_i, ctxk2);
 133     return true;
 134   } else {
 135     return false;
 136   }
 137 }
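
// Merging sketch (hypothetical types):  suppose unique_concrete_method is
// already recorded as (ctxk1 = java.util.AbstractList, m) and a new assertion
// arrives as (ctxk2 = java.util.AbstractCollection, m).  Since AbstractList
// is a subtype of AbstractCollection, the recorded context is widened in
// place to AbstractCollection; in the opposite direction the new, narrower
// assertion is simply dropped because the recorded one already covers it.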
 138 
 139 void Dependencies::assert_common_1(DepType dept, ciBaseObject* x) {
 140   assert(dep_args(dept) == 1, "sanity");
 141   log_dependency(dept, x);
 142   GrowableArray<ciBaseObject*>* deps = _deps[dept];
 143 
 144   // see if the same (or a similar) dep is already recorded
 145   if (note_dep_seen(dept, x)) {
 146     assert(deps->find(x) >= 0, "sanity");
 147   } else {
 148     deps->append(x);
 149   }
 150 }
 151 
 152 void Dependencies::assert_common_2(DepType dept,
 153                                    ciBaseObject* x0, ciBaseObject* x1) {
 154   assert(dep_args(dept) == 2, "sanity");
 155   log_dependency(dept, x0, x1);
 156   GrowableArray<ciBaseObject*>* deps = _deps[dept];
 157 
 158   // see if the same (or a similar) dep is already recorded
 159   bool has_ctxk = has_explicit_context_arg(dept);
 160   if (has_ctxk) {
 161     assert(dep_context_arg(dept) == 0, "sanity");
 162     if (note_dep_seen(dept, x1)) {
 163       // look in this bucket for redundant assertions
 164       const int stride = 2;
 165       for (int i = deps->length(); (i -= stride) >= 0; ) {
 166         ciBaseObject* y1 = deps->at(i+1);
 167         if (x1 == y1) {  // same subject; check the context
 168           if (maybe_merge_ctxk(deps, i+0, x0->as_metadata()->as_klass())) {
 169             return;
 170           }
 171         }
 172       }
 173     }
 174   } else {
 175     assert(dep_implicit_context_arg(dept) == 0, "sanity");
 176     if (note_dep_seen(dept, x0) && note_dep_seen(dept, x1)) {
 177       // look in this bucket for redundant assertions
 178       const int stride = 2;
 179       for (int i = deps->length(); (i -= stride) >= 0; ) {
 180         ciBaseObject* y0 = deps->at(i+0);
 181         ciBaseObject* y1 = deps->at(i+1);
 182         if (x0 == y0 && x1 == y1) {
 183           return;
 184         }
 185       }
 186     }
 187   }
 188 
 189   // append the assertion in the correct bucket:
 190   deps->append(x0);
 191   deps->append(x1);
 192 }
 193 
 194 void Dependencies::assert_common_3(DepType dept,
 195                                    ciKlass* ctxk, ciBaseObject* x, ciBaseObject* x2) {
 196   assert(dep_context_arg(dept) == 0, "sanity");
 197   assert(dep_args(dept) == 3, "sanity");
 198   log_dependency(dept, ctxk, x, x2);
 199   GrowableArray<ciBaseObject*>* deps = _deps[dept];
 200 
 201   // try to normalize an unordered pair:
 202   bool swap = false;
 203   switch (dept) {
 204   case abstract_with_exclusive_concrete_subtypes_2:
 205     swap = (x->ident() > x2->ident() && x->as_metadata()->as_klass() != ctxk);
 206     break;
 207   case exclusive_concrete_methods_2:
 208     swap = (x->ident() > x2->ident() && x->as_metadata()->as_method()->holder() != ctxk);
 209     break;
 210   }
 211   if (swap) { ciBaseObject* t = x; x = x2; x2 = t; }
 212 
 213   // see if the same (or a similar) dep is already recorded
 214   if (note_dep_seen(dept, x) && note_dep_seen(dept, x2)) {
 215     // look in this bucket for redundant assertions
 216     const int stride = 3;
 217     for (int i = deps->length(); (i -= stride) >= 0; ) {
 218       ciBaseObject* y  = deps->at(i+1);
 219       ciBaseObject* y2 = deps->at(i+2);
 220       if (x == y && x2 == y2) {  // same subjects; check the context
 221         if (maybe_merge_ctxk(deps, i+0, ctxk)) {
 222           return;
 223         }
 224       }
 225     }
 226   }
 227   // append the assertion in the correct bucket:
 228   deps->append(ctxk);
 229   deps->append(x);
 230   deps->append(x2);
 231 }
 232 
 233 /// Support for encoding dependencies into an nmethod:
 234 
 235 void Dependencies::copy_to(nmethod* nm) {
 236   address beg = nm->dependencies_begin();
 237   address end = nm->dependencies_end();
 238   guarantee(end - beg >= (ptrdiff_t) size_in_bytes(), "bad sizing");
 239   Copy::disjoint_words((HeapWord*) content_bytes(),
 240                        (HeapWord*) beg,
 241                        size_in_bytes() / sizeof(HeapWord));
 242   assert(size_in_bytes() % sizeof(HeapWord) == 0, "copy by words");
 243 }
 244 
 245 static int sort_dep(ciBaseObject** p1, ciBaseObject** p2, int narg) {
 246   for (int i = 0; i < narg; i++) {
 247     int diff = p1[i]->ident() - p2[i]->ident();
 248     if (diff != 0)  return diff;
 249   }
 250   return 0;
 251 }
 252 static int sort_dep_arg_1(ciBaseObject** p1, ciBaseObject** p2)
 253 { return sort_dep(p1, p2, 1); }
 254 static int sort_dep_arg_2(ciBaseObject** p1, ciBaseObject** p2)
 255 { return sort_dep(p1, p2, 2); }
 256 static int sort_dep_arg_3(ciBaseObject** p1, ciBaseObject** p2)
 257 { return sort_dep(p1, p2, 3); }
 258 
 259 void Dependencies::sort_all_deps() {
 260   for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
 261     DepType dept = (DepType)deptv;
 262     GrowableArray<ciBaseObject*>* deps = _deps[dept];
 263     if (deps->length() <= 1)  continue;
 264     switch (dep_args(dept)) {
 265     case 1: deps->sort(sort_dep_arg_1, 1); break;
 266     case 2: deps->sort(sort_dep_arg_2, 2); break;
 267     case 3: deps->sort(sort_dep_arg_3, 3); break;
 268     default: ShouldNotReachHere();
 269     }
 270   }
 271 }
 272 
 273 size_t Dependencies::estimate_size_in_bytes() {
 274   size_t est_size = 100;
 275   for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
 276     DepType dept = (DepType)deptv;
 277     GrowableArray<ciBaseObject*>* deps = _deps[dept];
 278     est_size += deps->length()*2;  // tags and argument(s)
 279   }
 280   return est_size;
 281 }
 282 
 283 ciKlass* Dependencies::ctxk_encoded_as_null(DepType dept, ciBaseObject* x) {
 284   switch (dept) {
 285   case abstract_with_exclusive_concrete_subtypes_2:
 286     return x->as_metadata()->as_klass();
 287   case unique_concrete_method:
 288   case exclusive_concrete_methods_2:
 289     return x->as_metadata()->as_method()->holder();
 290   }
 291   return NULL;  // let NULL be NULL
 292 }
 293 
 294 Klass* Dependencies::ctxk_encoded_as_null(DepType dept, Metadata* x) {
 295   assert(must_be_in_vm(), "raw oops here");
 296   switch (dept) {
 297   case abstract_with_exclusive_concrete_subtypes_2:
 298     assert(x->is_klass(), "sanity");
 299     return (Klass*) x;
 300   case unique_concrete_method:
 301   case exclusive_concrete_methods_2:
 302     assert(x->is_method(), "sanity");
 303     return ((Method*)x)->method_holder();
 304   }
 305   return NULL;  // let NULL be NULL
 306 }
 307 
 308 void Dependencies::encode_content_bytes() {
 309   sort_all_deps();
 310 
 311   // cast is safe, no deps can overflow INT_MAX
 312   CompressedWriteStream bytes((int)estimate_size_in_bytes());
 313 
 314   for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
 315     DepType dept = (DepType)deptv;
 316     GrowableArray<ciBaseObject*>* deps = _deps[dept];
 317     if (deps->length() == 0)  continue;
 318     int stride = dep_args(dept);
 319     int ctxkj  = dep_context_arg(dept);  // -1 if no context arg
 320     assert(stride > 0, "sanity");
 321     for (int i = 0; i < deps->length(); i += stride) {
 322       jbyte code_byte = (jbyte)dept;
 323       int skipj = -1;
 324       if (ctxkj >= 0 && ctxkj+1 < stride) {
 325         ciKlass*  ctxk = deps->at(i+ctxkj+0)->as_metadata()->as_klass();
 326         ciBaseObject* x     = deps->at(i+ctxkj+1);  // following argument
 327         if (ctxk == ctxk_encoded_as_null(dept, x)) {
 328           skipj = ctxkj;  // we win:  maybe one less oop to keep track of
 329           code_byte |= default_context_type_bit;
 330         }
 331       }
 332       bytes.write_byte(code_byte);
 333       for (int j = 0; j < stride; j++) {
 334         if (j == skipj)  continue;
 335         ciBaseObject* v = deps->at(i+j);
 336         int idx;
 337         if (v->is_object()) {
 338           idx = _oop_recorder->find_index(v->as_object()->constant_encoding());
 339         } else {
 340           ciMetadata* meta = v->as_metadata();
 341           idx = _oop_recorder->find_index(meta->constant_encoding());
 342         }
 343         bytes.write_int(idx);
 344       }
 345     }
 346   }
 347 
 348   // write a sentinel byte to mark the end
 349   bytes.write_byte(end_marker);
 350 
 351   // round it out to a word boundary
 352   while (bytes.position() % sizeof(HeapWord) != 0) {
 353     bytes.write_byte(end_marker);
 354   }
 355 
 356   // check whether the dept byte encoding really works
 357   assert((jbyte)default_context_type_bit != 0, "byte overflow");
 358 
 359   _content_bytes = bytes.buffer();
 360   _size_in_bytes = bytes.position();
 361 }
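
// Encoded layout sketch (informal; the writer above and DepStream::next()
// below are authoritative):  each dependency record consists of
//
//   tag byte:   the DepType, with default_context_type_bit OR'ed in when the
//               context klass equals ctxk_encoded_as_null() of the following
//               argument and is therefore omitted from the stream
//   arguments:  dep_args(dept) compressed ints (minus any omitted context),
//               each an index into the OopRecorder
//
// The stream ends with an end_marker byte and is padded with further
// end_marker bytes to a HeapWord boundary so that copy_to() can copy by words.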
 362 
 363 
 364 const char* Dependencies::_dep_name[TYPE_LIMIT] = {
 365   "end_marker",
 366   "evol_method",
 367   "leaf_type",
 368   "abstract_with_unique_concrete_subtype",
 369   "abstract_with_no_concrete_subtype",
 370   "concrete_with_no_concrete_subtype",
 371   "unique_concrete_method",
 372   "abstract_with_exclusive_concrete_subtypes_2",
 373   "exclusive_concrete_methods_2",
 374   "no_finalizable_subclasses",
 375   "call_site_target_value"
 376 };
 377 
 378 int Dependencies::_dep_args[TYPE_LIMIT] = {
 379   -1,// end_marker
 380   1, // evol_method m
 381   1, // leaf_type ctxk
 382   2, // abstract_with_unique_concrete_subtype ctxk, k
 383   1, // abstract_with_no_concrete_subtype ctxk
 384   1, // concrete_with_no_concrete_subtype ctxk
 385   2, // unique_concrete_method ctxk, m
  3, // abstract_with_exclusive_concrete_subtypes_2 ctxk, k1, k2
  3, // exclusive_concrete_methods_2 ctxk, m1, m2
 388   1, // no_finalizable_subclasses ctxk
 389   2  // call_site_target_value call_site, method_handle
 390 };
 391 
 392 const char* Dependencies::dep_name(Dependencies::DepType dept) {
 393   if (!dept_in_mask(dept, all_types))  return "?bad-dep?";
 394   return _dep_name[dept];
 395 }
 396 
 397 int Dependencies::dep_args(Dependencies::DepType dept) {
 398   if (!dept_in_mask(dept, all_types))  return -1;
 399   return _dep_args[dept];
 400 }
 401 
 402 void Dependencies::check_valid_dependency_type(DepType dept) {
 403   guarantee(FIRST_TYPE <= dept && dept < TYPE_LIMIT, err_msg("invalid dependency type: %d", (int) dept));
 404 }
 405 
 406 // for the sake of the compiler log, print out current dependencies:
 407 void Dependencies::log_all_dependencies() {
 408   if (log() == NULL)  return;
 409   ciBaseObject* args[max_arg_count];
 410   for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
 411     DepType dept = (DepType)deptv;
 412     GrowableArray<ciBaseObject*>* deps = _deps[dept];
 413     if (deps->length() == 0)  continue;
 414     int stride = dep_args(dept);
 415     for (int i = 0; i < deps->length(); i += stride) {
 416       for (int j = 0; j < stride; j++) {
 417         // flush out the identities before printing
 418         args[j] = deps->at(i+j);
 419       }
 420       write_dependency_to(log(), dept, stride, args);
 421     }
 422   }
 423 }
 424 
 425 void Dependencies::write_dependency_to(CompileLog* log,
 426                                        DepType dept,
 427                                        int nargs, DepArgument args[],
 428                                        Klass* witness) {
 429   if (log == NULL) {
 430     return;
 431   }
 432   ciEnv* env = ciEnv::current();
 433   ciBaseObject* ciargs[max_arg_count];
 434   assert(nargs <= max_arg_count, "oob");
 435   for (int j = 0; j < nargs; j++) {
 436     if (args[j].is_oop()) {
 437       ciargs[j] = env->get_object(args[j].oop_value());
 438     } else {
 439       ciargs[j] = env->get_metadata(args[j].metadata_value());
 440     }
 441   }
 442   Dependencies::write_dependency_to(log, dept, nargs, ciargs, witness);
 443 }
 444 
 445 void Dependencies::write_dependency_to(CompileLog* log,
 446                                        DepType dept,
 447                                        int nargs, ciBaseObject* args[],
 448                                        Klass* witness) {
 449   if (log == NULL)  return;
 450   assert(nargs <= max_arg_count, "oob");
 451   int argids[max_arg_count];
 452   int ctxkj = dep_context_arg(dept);  // -1 if no context arg
 453   int j;
 454   for (j = 0; j < nargs; j++) {
 455     if (args[j]->is_object()) {
 456       argids[j] = log->identify(args[j]->as_object());
 457     } else {
 458       argids[j] = log->identify(args[j]->as_metadata());
 459     }
 460   }
 461   if (witness != NULL) {
 462     log->begin_elem("dependency_failed");
 463   } else {
 464     log->begin_elem("dependency");
 465   }
 466   log->print(" type='%s'", dep_name(dept));
 467   if (ctxkj >= 0) {
 468     log->print(" ctxk='%d'", argids[ctxkj]);
 469   }
 470   // write remaining arguments, if any.
 471   for (j = 0; j < nargs; j++) {
 472     if (j == ctxkj)  continue;  // already logged
 473     if (j == 1) {
 474       log->print(  " x='%d'",    argids[j]);
 475     } else {
 476       log->print(" x%d='%d'", j, argids[j]);
 477     }
 478   }
 479   if (witness != NULL) {
 480     log->object("witness", witness);
 481     log->stamp();
 482   }
 483   log->end_elem();
 484 }
 485 
 486 void Dependencies::write_dependency_to(xmlStream* xtty,
 487                                        DepType dept,
 488                                        int nargs, DepArgument args[],
 489                                        Klass* witness) {
 490   if (xtty == NULL)  return;
 491   ttyLocker ttyl;
 492   int ctxkj = dep_context_arg(dept);  // -1 if no context arg
 493   if (witness != NULL) {
 494     xtty->begin_elem("dependency_failed");
 495   } else {
 496     xtty->begin_elem("dependency");
 497   }
 498   xtty->print(" type='%s'", dep_name(dept));
 499   if (ctxkj >= 0) {
 500     xtty->object("ctxk", args[ctxkj].metadata_value());
 501   }
 502   // write remaining arguments, if any.
 503   for (int j = 0; j < nargs; j++) {
 504     if (j == ctxkj)  continue;  // already logged
 505     if (j == 1) {
 506       if (args[j].is_oop()) {
 507         xtty->object("x", args[j].oop_value());
 508       } else {
 509         xtty->object("x", args[j].metadata_value());
 510       }
 511     } else {
 512       char xn[10]; sprintf(xn, "x%d", j);
 513       if (args[j].is_oop()) {
 514         xtty->object(xn, args[j].oop_value());
 515       } else {
 516         xtty->object(xn, args[j].metadata_value());
 517       }
 518     }
 519   }
 520   if (witness != NULL) {
 521     xtty->object("witness", witness);
 522     xtty->stamp();
 523   }
 524   xtty->end_elem();
 525 }
 526 
 527 void Dependencies::print_dependency(DepType dept, int nargs, DepArgument args[],
 528                                     Klass* witness) {
 529   ResourceMark rm;
 530   ttyLocker ttyl;   // keep the following output all in one block
 531   tty->print_cr("%s of type %s",
 532                 (witness == NULL)? "Dependency": "Failed dependency",
 533                 dep_name(dept));
 534   // print arguments
 535   int ctxkj = dep_context_arg(dept);  // -1 if no context arg
 536   for (int j = 0; j < nargs; j++) {
 537     DepArgument arg = args[j];
 538     bool put_star = false;
 539     if (arg.is_null())  continue;
 540     const char* what;
 541     if (j == ctxkj) {
 542       assert(arg.is_metadata(), "must be");
 543       what = "context";
 544       put_star = !Dependencies::is_concrete_klass((Klass*)arg.metadata_value());
 545     } else if (arg.is_method()) {
 546       what = "method ";
 547       put_star = !Dependencies::is_concrete_method((Method*)arg.metadata_value());
 548     } else if (arg.is_klass()) {
 549       what = "class  ";
 550     } else {
 551       what = "object ";
 552     }
 553     tty->print("  %s = %s", what, (put_star? "*": ""));
 554     if (arg.is_klass())
 555       tty->print("%s", ((Klass*)arg.metadata_value())->external_name());
 556     else if (arg.is_method())
 557       ((Method*)arg.metadata_value())->print_value();
 558     else
 559       ShouldNotReachHere(); // Provide impl for this type.
 560     tty->cr();
 561   }
 562   if (witness != NULL) {
 563     bool put_star = !Dependencies::is_concrete_klass(witness);
 564     tty->print_cr("  witness = %s%s",
 565                   (put_star? "*": ""),
 566                   witness->external_name());
 567   }
 568 }
 569 
 570 void Dependencies::DepStream::log_dependency(Klass* witness) {
 571   if (_deps == NULL && xtty == NULL)  return;  // fast cutout for runtime
 572   ResourceMark rm;
 573   int nargs = argument_count();
 574   DepArgument args[max_arg_count];
 575   for (int j = 0; j < nargs; j++) {
 576     if (type() == call_site_target_value) {
 577       args[j] = argument_oop(j);
 578     } else {
 579       args[j] = argument(j);
 580     }
 581   }
 582   if (_deps != NULL && _deps->log() != NULL) {
 583     Dependencies::write_dependency_to(_deps->log(),
 584                                       type(), nargs, args, witness);
 585   } else {
 586     Dependencies::write_dependency_to(xtty,
 587                                       type(), nargs, args, witness);
 588   }
 589 }
 590 
 591 void Dependencies::DepStream::print_dependency(Klass* witness, bool verbose) {
 592   int nargs = argument_count();
 593   DepArgument args[max_arg_count];
 594   for (int j = 0; j < nargs; j++) {
 595     args[j] = argument(j);
 596   }
 597   Dependencies::print_dependency(type(), nargs, args, witness);
 598   if (verbose) {
 599     if (_code != NULL) {
 600       tty->print("  code: ");
 601       _code->print_value_on(tty);
 602       tty->cr();
 603     }
 604   }
 605 }
 606 
 607 
 608 /// Dependency stream support (decodes dependencies from an nmethod):
 609 
 610 #ifdef ASSERT
 611 void Dependencies::DepStream::initial_asserts(size_t byte_limit) {
 612   assert(must_be_in_vm(), "raw oops here");
 613   _byte_limit = byte_limit;
 614   _type       = (DepType)(end_marker-1);  // defeat "already at end" assert
 615   assert((_code!=NULL) + (_deps!=NULL) == 1, "one or t'other");
 616 }
 617 #endif //ASSERT
 618 
 619 bool Dependencies::DepStream::next() {
 620   assert(_type != end_marker, "already at end");
 621   if (_bytes.position() == 0 && _code != NULL
 622       && _code->dependencies_size() == 0) {
 623     // Method has no dependencies at all.
 624     return false;
 625   }
 626   int code_byte = (_bytes.read_byte() & 0xFF);
 627   if (code_byte == end_marker) {
 628     DEBUG_ONLY(_type = end_marker);
 629     return false;
 630   } else {
 631     int ctxk_bit = (code_byte & Dependencies::default_context_type_bit);
 632     code_byte -= ctxk_bit;
 633     DepType dept = (DepType)code_byte;
 634     _type = dept;
 635     Dependencies::check_valid_dependency_type(dept);
 636     int stride = _dep_args[dept];
 637     assert(stride == dep_args(dept), "sanity");
 638     int skipj = -1;
 639     if (ctxk_bit != 0) {
 640       skipj = 0;  // currently the only context argument is at zero
 641       assert(skipj == dep_context_arg(dept), "zero arg always ctxk");
 642     }
 643     for (int j = 0; j < stride; j++) {
 644       _xi[j] = (j == skipj)? 0: _bytes.read_int();
 645     }
 646     DEBUG_ONLY(_xi[stride] = -1);   // help detect overruns
 647     return true;
 648   }
 649 }
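
// Usage sketch (hedged; mirrors the typical runtime-side iteration, exact
// call sites live elsewhere):
//
//   for (Dependencies::DepStream deps(nm); deps.next(); ) {
//     Klass* witness = deps.check_dependency();   // NULL means still valid
//     if (witness != NULL) {
//       // the assumption is broken; the nmethod must be deoptimized
//     }
//   }
//
// check_dependency() is assumed here to be the DepStream query declared in
// dependencies.hpp.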
 650 
 651 inline Metadata* Dependencies::DepStream::recorded_metadata_at(int i) {
 652   Metadata* o = NULL;
 653   if (_code != NULL) {
 654     o = _code->metadata_at(i);
 655   } else {
 656     o = _deps->oop_recorder()->metadata_at(i);
 657   }
 658   return o;
 659 }
 660 
 661 inline oop Dependencies::DepStream::recorded_oop_at(int i) {
 662   return (_code != NULL)
 663          ? _code->oop_at(i)
         : JNIHandles::resolve(_deps->oop_recorder()->oop_at(i));
 665 }
 666 
 667 Metadata* Dependencies::DepStream::argument(int i) {
 668   Metadata* result = recorded_metadata_at(argument_index(i));
 669 
 670   if (result == NULL) { // Explicit context argument can be compressed
 671     int ctxkj = dep_context_arg(type());  // -1 if no explicit context arg
 672     if (ctxkj >= 0 && i == ctxkj && ctxkj+1 < argument_count()) {
 673       result = ctxk_encoded_as_null(type(), argument(ctxkj+1));
 674     }
 675   }
 676 
 677   assert(result == NULL || result->is_klass() || result->is_method(), "must be");
 678   return result;
 679 }
 680 
 681 /**
 682  * Returns a unique identifier for each dependency argument.
 683  */
 684 uintptr_t Dependencies::DepStream::get_identifier(int i) {
 685   if (has_oop_argument()) {
 686     return (uintptr_t)(oopDesc*)argument_oop(i);
 687   } else {
 688     return (uintptr_t)argument(i);
 689   }
 690 }
 691 
 692 oop Dependencies::DepStream::argument_oop(int i) {
 693   oop result = recorded_oop_at(argument_index(i));
 694   assert(result == NULL || result->is_oop(), "must be");
 695   return result;
 696 }
 697 
 698 Klass* Dependencies::DepStream::context_type() {
 699   assert(must_be_in_vm(), "raw oops here");
 700 
 701   // Most dependencies have an explicit context type argument.
 702   {
 703     int ctxkj = dep_context_arg(type());  // -1 if no explicit context arg
 704     if (ctxkj >= 0) {
 705       Metadata* k = argument(ctxkj);
 706       assert(k != NULL && k->is_klass(), "type check");
 707       return (Klass*)k;
 708     }
 709   }
 710 
 711   // Some dependencies are using the klass of the first object
 712   // argument as implicit context type (e.g. call_site_target_value).
 713   {
 714     int ctxkj = dep_implicit_context_arg(type());
 715     if (ctxkj >= 0) {
 716       Klass* k = argument_oop(ctxkj)->klass();
 717       assert(k != NULL && k->is_klass(), "type check");
 718       return (Klass*) k;
 719     }
 720   }
 721 
 722   // And some dependencies don't have a context type at all,
 723   // e.g. evol_method.
 724   return NULL;
 725 }
 726 
 727 // ----------------- DependencySignature --------------------------------------
 728 bool DependencySignature::equals(const DependencySignature& sig) const {
 729   if (type() != sig.type()) {
 730     return false;
 731   }
 732 
 733   if (args_count() != sig.args_count()) {
 734     return false;
 735   }
 736 
 737   for (int i = 0; i < sig.args_count(); i++) {
 738     if (arg(i) != sig.arg(i)) {
 739       return false;
 740     }
 741   }
 742   return true;
 743 }
 744 
 745 
 746 // ----------------- DependencySignatureBuffer --------------------------------------
 747 DependencySignatureBuffer::DependencySignatureBuffer() {
 748   _signatures = NEW_RESOURCE_ARRAY(GrowableArray<DependencySignature*>*, Dependencies::TYPE_LIMIT);
  memset(_signatures, 0, sizeof(GrowableArray<DependencySignature*>*) * Dependencies::TYPE_LIMIT);
 750 }
 751 
 752 /* Check if arguments are identical. Two dependency signatures are considered
 753  * identical, if the type as well as all argument identifiers are identical.
 754  * If the dependency has not already been checked, the dependency signature is
 755  * added to the checked dependencies of the same type. The function returns
 756  * false, which causes the dependency to be checked in the caller.
 757  */
 758 bool DependencySignatureBuffer::add_if_missing(const DependencySignature& sig) {
 759   const int index = sig.type();
 760   GrowableArray<DependencySignature*>* buffer = _signatures[index];
 761   if (buffer == NULL) {
 762     buffer = new GrowableArray<DependencySignature*>();
 763     _signatures[index] = buffer;
 764   }
 765 
 766   // Check if we have already checked the dependency
 767   for (int i = 0; i < buffer->length(); i++) {
 768     DependencySignature* checked_signature = buffer->at(i);
 769     if (checked_signature->equals(sig)) {
 770       return true;
 771     }
 772   }
 773   buffer->append((DependencySignature*)&sig);
 774   return false;
 775 }
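
// Usage sketch (hedged; assumes the DependencySignature(DepStream&)
// constructor declared in dependencies.hpp):  callers validating a batch of
// nmethod dependencies can skip duplicates like this:
//
//   DependencySignatureBuffer seen;
//   while (deps.next()) {
//     DependencySignature sig(deps);
//     if (seen.add_if_missing(sig))  continue;   // identical dep already checked
//     // ... perform the (expensive) dependency check ...
//   }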
 776 
 777 
 778 /// Checking dependencies:
 779 
 780 // This hierarchy walker inspects subtypes of a given type,
 781 // trying to find a "bad" class which breaks a dependency.
 782 // Such a class is called a "witness" to the broken dependency.
 783 // While searching around, we ignore "participants", which
 784 // are already known to the dependency.
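// For example (hypothetical hierarchy):  with context type A, participant B,
// and a freshly loaded concrete class C where A > B > C, a subtype search
// with participants_hide_witnesses ignores C because it is hidden beneath
// the expected participant B, while a method-definer search still examines
// C, since C might override the unique method that B was expected to supply.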
 785 class ClassHierarchyWalker {
 786  public:
 787   enum { PARTICIPANT_LIMIT = 3 };
 788 
 789  private:
 790   // optional method descriptor to check for:
 791   Symbol* _name;
 792   Symbol* _signature;
 793 
 794   // special classes which are not allowed to be witnesses:
 795   Klass*    _participants[PARTICIPANT_LIMIT+1];
 796   int       _num_participants;
 797 
 798   // cache of method lookups
 799   Method* _found_methods[PARTICIPANT_LIMIT+1];
 800 
 801   // if non-zero, tells how many witnesses to convert to participants
 802   int       _record_witnesses;
 803 
 804   void initialize(Klass* participant) {
 805     _record_witnesses = 0;
 806     _participants[0]  = participant;
 807     _found_methods[0] = NULL;
 808     _num_participants = 0;
 809     if (participant != NULL) {
 810       // Terminating NULL.
 811       _participants[1] = NULL;
 812       _found_methods[1] = NULL;
 813       _num_participants = 1;
 814     }
 815   }
 816 
 817   void initialize_from_method(Method* m) {
 818     assert(m != NULL && m->is_method(), "sanity");
 819     _name      = m->name();
 820     _signature = m->signature();
 821   }
 822 
 823  public:
 824   // The walker is initialized to recognize certain methods and/or types
 825   // as friendly participants.
 826   ClassHierarchyWalker(Klass* participant, Method* m) {
 827     initialize_from_method(m);
 828     initialize(participant);
 829   }
 830   ClassHierarchyWalker(Method* m) {
 831     initialize_from_method(m);
 832     initialize(NULL);
 833   }
 834   ClassHierarchyWalker(Klass* participant = NULL) {
 835     _name      = NULL;
 836     _signature = NULL;
 837     initialize(participant);
 838   }
 839 
 840   // This is common code for two searches:  One for concrete subtypes,
 841   // the other for concrete method implementations and overrides.
 842   bool doing_subtype_search() {
 843     return _name == NULL;
 844   }
 845 
 846   int num_participants() { return _num_participants; }
 847   Klass* participant(int n) {
 848     assert((uint)n <= (uint)_num_participants, "oob");
 849     return _participants[n];
 850   }
 851 
 852   // Note:  If n==num_participants, returns NULL.
 853   Method* found_method(int n) {
 854     assert((uint)n <= (uint)_num_participants, "oob");
 855     Method* fm = _found_methods[n];
 856     assert(n == _num_participants || fm != NULL, "proper usage");
 857     assert(fm == NULL || fm->method_holder() == _participants[n], "sanity");
 858     return fm;
 859   }
 860 
 861 #ifdef ASSERT
 862   // Assert that m is inherited into ctxk, without intervening overrides.
 863   // (May return true even if this is not true, in corner cases where we punt.)
 864   bool check_method_context(Klass* ctxk, Method* m) {
 865     if (m->method_holder() == ctxk)
 866       return true;  // Quick win.
 867     if (m->is_private())
 868       return false; // Quick lose.  Should not happen.
 869     if (!(m->is_public() || m->is_protected()))
 870       // The override story is complex when packages get involved.
 871       return true;  // Must punt the assertion to true.
 872     Klass* k = ctxk;
 873     Method* lm = k->lookup_method(m->name(), m->signature());
 874     if (lm == NULL && k->oop_is_instance()) {
 875       // It might be an interface method
      lm = ((InstanceKlass*)k)->lookup_method_in_ordered_interfaces(m->name(),
                                                                    m->signature());
 878     }
 879     if (lm == m)
 880       // Method m is inherited into ctxk.
 881       return true;
 882     if (lm != NULL) {
 883       if (!(lm->is_public() || lm->is_protected())) {
 884         // Method is [package-]private, so the override story is complex.
 885         return true;  // Must punt the assertion to true.
 886       }
 887       if (lm->is_static()) {
 888         // Static methods don't override non-static so punt
 889         return true;
 890       }
 891       if (   !Dependencies::is_concrete_method(lm)
 892           && !Dependencies::is_concrete_method(m)
 893           && lm->method_holder()->is_subtype_of(m->method_holder()))
 894         // Method m is overridden by lm, but both are non-concrete.
 895         return true;
 896     }
 897     ResourceMark rm;
 898     tty->print_cr("Dependency method not found in the associated context:");
 899     tty->print_cr("  context = %s", ctxk->external_name());
 900     tty->print(   "  method = "); m->print_short_name(tty); tty->cr();
 901     if (lm != NULL) {
 902       tty->print( "  found = "); lm->print_short_name(tty); tty->cr();
 903     }
 904     return false;
 905   }
 906 #endif
 907 
 908   void add_participant(Klass* participant) {
 909     assert(_num_participants + _record_witnesses < PARTICIPANT_LIMIT, "oob");
 910     int np = _num_participants++;
 911     _participants[np] = participant;
 912     _participants[np+1] = NULL;
 913     _found_methods[np+1] = NULL;
 914   }
 915 
 916   void record_witnesses(int add) {
 917     if (add > PARTICIPANT_LIMIT)  add = PARTICIPANT_LIMIT;
 918     assert(_num_participants + add < PARTICIPANT_LIMIT, "oob");
 919     _record_witnesses = add;
 920   }
 921 
 922   bool is_witness(Klass* k) {
 923     if (doing_subtype_search()) {
 924       return Dependencies::is_concrete_klass(k);
 925     } else {
 926       Method* m = InstanceKlass::cast(k)->find_method(_name, _signature);
 927       if (m == NULL || !Dependencies::is_concrete_method(m))  return false;
 928       _found_methods[_num_participants] = m;
 929       // Note:  If add_participant(k) is called,
 930       // the method m will already be memoized for it.
 931       return true;
 932     }
 933   }
 934 
 935   bool is_participant(Klass* k) {
 936     if (k == _participants[0]) {
 937       return true;
 938     } else if (_num_participants <= 1) {
 939       return false;
 940     } else {
 941       return in_list(k, &_participants[1]);
 942     }
 943   }
 944   bool ignore_witness(Klass* witness) {
 945     if (_record_witnesses == 0) {
 946       return false;
 947     } else {
 948       --_record_witnesses;
 949       add_participant(witness);
 950       return true;
 951     }
 952   }
 953   static bool in_list(Klass* x, Klass** list) {
 954     for (int i = 0; ; i++) {
 955       Klass* y = list[i];
 956       if (y == NULL)  break;
 957       if (y == x)  return true;
 958     }
 959     return false;  // not in list
 960   }
 961 
 962  private:
 963   // the actual search method:
  Klass* find_witness_anywhere(Klass* context_type,
                               bool participants_hide_witnesses,
                               bool top_level_call = true);
  // the spot-checking version:
  Klass* find_witness_in(KlassDepChange& changes,
                         Klass* context_type,
                         bool participants_hide_witnesses);
 971  public:
 972   Klass* find_witness_subtype(Klass* context_type, KlassDepChange* changes = NULL) {
 973     assert(doing_subtype_search(), "must set up a subtype search");
 974     // When looking for unexpected concrete types,
 975     // do not look beneath expected ones.
 976     const bool participants_hide_witnesses = true;
 977     // CX > CC > C' is OK, even if C' is new.
 978     // CX > { CC,  C' } is not OK if C' is new, and C' is the witness.
 979     if (changes != NULL) {
 980       return find_witness_in(*changes, context_type, participants_hide_witnesses);
 981     } else {
 982       return find_witness_anywhere(context_type, participants_hide_witnesses);
 983     }
 984   }
 985   Klass* find_witness_definer(Klass* context_type, KlassDepChange* changes = NULL) {
 986     assert(!doing_subtype_search(), "must set up a method definer search");
 987     // When looking for unexpected concrete methods,
 988     // look beneath expected ones, to see if there are overrides.
 989     const bool participants_hide_witnesses = true;
 990     // CX.m > CC.m > C'.m is not OK, if C'.m is new, and C' is the witness.
 991     if (changes != NULL) {
 992       return find_witness_in(*changes, context_type, !participants_hide_witnesses);
 993     } else {
 994       return find_witness_anywhere(context_type, !participants_hide_witnesses);
 995     }
 996   }
 997 };
 998 
 999 #ifndef PRODUCT
1000 static int deps_find_witness_calls = 0;
1001 static int deps_find_witness_steps = 0;
1002 static int deps_find_witness_recursions = 0;
1003 static int deps_find_witness_singles = 0;
1004 static int deps_find_witness_print = 0; // set to -1 to force a final print
1005 static bool count_find_witness_calls() {
1006   if (TraceDependencies || LogCompilation) {
1007     int pcount = deps_find_witness_print + 1;
1008     bool final_stats      = (pcount == 0);
1009     bool initial_call     = (pcount == 1);
1010     bool occasional_print = ((pcount & ((1<<10) - 1)) == 0);
1011     if (pcount < 0)  pcount = 1; // crude overflow protection
1012     deps_find_witness_print = pcount;
1013     if (VerifyDependencies && initial_call) {
1014       tty->print_cr("Warning:  TraceDependencies results may be inflated by VerifyDependencies");
1015     }
1016     if (occasional_print || final_stats) {
1017       // Every now and then dump a little info about dependency searching.
1018       if (xtty != NULL) {
        ttyLocker ttyl;
        xtty->elem("deps_find_witness calls='%d' steps='%d' recursions='%d' singles='%d'",
                   deps_find_witness_calls,
                   deps_find_witness_steps,
                   deps_find_witness_recursions,
                   deps_find_witness_singles);
1025       }
1026       if (final_stats || (TraceDependencies && WizardMode)) {
1027         ttyLocker ttyl;
1028         tty->print_cr("Dependency check (find_witness) "
1029                       "calls=%d, steps=%d (avg=%.1f), recursions=%d, singles=%d",
1030                       deps_find_witness_calls,
1031                       deps_find_witness_steps,
1032                       (double)deps_find_witness_steps / deps_find_witness_calls,
1033                       deps_find_witness_recursions,
1034                       deps_find_witness_singles);
1035       }
1036     }
1037     return true;
1038   }
1039   return false;
1040 }
1041 #else
1042 #define count_find_witness_calls() (0)
1043 #endif //PRODUCT
1044 
1045 
Klass* ClassHierarchyWalker::find_witness_in(KlassDepChange& changes,
                                             Klass* context_type,
                                             bool participants_hide_witnesses) {
1049   assert(changes.involves_context(context_type), "irrelevant dependency");
1050   Klass* new_type = changes.new_type();
1051 
1052   (void)count_find_witness_calls();
1053   NOT_PRODUCT(deps_find_witness_singles++);
1054 
1055   // Current thread must be in VM (not native mode, as in CI):
1056   assert(must_be_in_vm(), "raw oops here");
1057   // Must not move the class hierarchy during this check:
1058   assert_locked_or_safepoint(Compile_lock);
1059 
1060   int nof_impls = InstanceKlass::cast(context_type)->nof_implementors();
1061   if (nof_impls > 1) {
1062     // Avoid this case: *I.m > { A.m, C }; B.m > C
1063     // %%% Until this is fixed more systematically, bail out.
1064     // See corresponding comment in find_witness_anywhere.
1065     return context_type;
1066   }
1067 
1068   assert(!is_participant(new_type), "only old classes are participants");
1069   if (participants_hide_witnesses) {
1070     // If the new type is a subtype of a participant, we are done.
1071     for (int i = 0; i < num_participants(); i++) {
1072       Klass* part = participant(i);
1073       if (part == NULL)  continue;
1074       assert(changes.involves_context(part) == new_type->is_subtype_of(part),
1075              "correct marking of participants, b/c new_type is unique");
1076       if (changes.involves_context(part)) {
1077         // new guy is protected from this check by previous participant
1078         return NULL;
1079       }
1080     }
1081   }
1082 
1083   if (is_witness(new_type) &&
1084       !ignore_witness(new_type)) {
1085     return new_type;
1086   }
1087 
1088   return NULL;
1089 }
1090 
1091 
1092 // Walk hierarchy under a context type, looking for unexpected types.
1093 // Do not report participant types, and recursively walk beneath
1094 // them only if participants_hide_witnesses is false.
1095 // If top_level_call is false, skip testing the context type,
1096 // because the caller has already considered it.
Klass* ClassHierarchyWalker::find_witness_anywhere(Klass* context_type,
                                                   bool participants_hide_witnesses,
                                                   bool top_level_call) {
1100   // Current thread must be in VM (not native mode, as in CI):
1101   assert(must_be_in_vm(), "raw oops here");
1102   // Must not move the class hierarchy during this check:
1103   assert_locked_or_safepoint(Compile_lock);
1104 
1105   bool do_counts = count_find_witness_calls();
1106 
1107   // Check the root of the sub-hierarchy first.
1108   if (top_level_call) {
1109     if (do_counts) {
1110       NOT_PRODUCT(deps_find_witness_calls++);
1111       NOT_PRODUCT(deps_find_witness_steps++);
1112     }
1113     if (is_participant(context_type)) {
1114       if (participants_hide_witnesses)  return NULL;
1115       // else fall through to search loop...
1116     } else if (is_witness(context_type) && !ignore_witness(context_type)) {
1117       // The context is an abstract class or interface, to start with.
1118       return context_type;
1119     }
1120   }
1121 
1122   // Now we must check each implementor and each subclass.
1123   // Use a short worklist to avoid blowing the stack.
1124   // Each worklist entry is a *chain* of subklass siblings to process.
1125   const int CHAINMAX = 100;  // >= 1 + InstanceKlass::implementors_limit
1126   Klass* chains[CHAINMAX];
1127   int    chaini = 0;  // index into worklist
1128   Klass* chain;       // scratch variable
1129 #define ADD_SUBCLASS_CHAIN(k)                     {  \
1130     assert(chaini < CHAINMAX, "oob");                \
1131     chain = InstanceKlass::cast(k)->subklass();      \
1132     if (chain != NULL)  chains[chaini++] = chain;    }
1133 
1134   // Look for non-abstract subclasses.
1135   // (Note:  Interfaces do not have subclasses.)
1136   ADD_SUBCLASS_CHAIN(context_type);
1137 
1138   // If it is an interface, search its direct implementors.
1139   // (Their subclasses are additional indirect implementors.
1140   // See InstanceKlass::add_implementor.)
1141   // (Note:  nof_implementors is always zero for non-interfaces.)
1142   int nof_impls = InstanceKlass::cast(context_type)->nof_implementors();
1143   if (nof_impls > 1) {
1144     // Avoid this case: *I.m > { A.m, C }; B.m > C
1145     // Here, I.m has 2 concrete implementations, but m appears unique
1146     // as A.m, because the search misses B.m when checking C.
1147     // The inherited method B.m was getting missed by the walker
1148     // when interface 'I' was the starting point.
1149     // %%% Until this is fixed more systematically, bail out.
1150     // (Old CHA had the same limitation.)
1151     return context_type;
1152   }
1153   if (nof_impls > 0) {
1154     Klass* impl = InstanceKlass::cast(context_type)->implementor();
1155     assert(impl != NULL, "just checking");
    // If impl is the same as the context_type, then more than one
    // implementor has been seen.  No exact information is available in this case.
1158     if (impl == context_type) {
1159       return context_type;  // report an inexact witness to this sad affair
1160     }
1161     if (do_counts)
1162       { NOT_PRODUCT(deps_find_witness_steps++); }
1163     if (is_participant(impl)) {
1164       if (!participants_hide_witnesses) {
1165         ADD_SUBCLASS_CHAIN(impl);
1166       }
1167     } else if (is_witness(impl) && !ignore_witness(impl)) {
1168       return impl;
1169     } else {
1170       ADD_SUBCLASS_CHAIN(impl);
1171     }
1172   }
1173 
1174   // Recursively process each non-trivial sibling chain.
1175   while (chaini > 0) {
1176     Klass* chain = chains[--chaini];
1177     for (Klass* sub = chain; sub != NULL; sub = sub->next_sibling()) {
1178       if (do_counts) { NOT_PRODUCT(deps_find_witness_steps++); }
1179       if (is_participant(sub)) {
1180         if (participants_hide_witnesses)  continue;
1181         // else fall through to process this guy's subclasses
1182       } else if (is_witness(sub) && !ignore_witness(sub)) {
1183         return sub;
1184       }
1185       if (chaini < (VerifyDependencies? 2: CHAINMAX)) {
1186         // Fast path.  (Partially disabled if VerifyDependencies.)
1187         ADD_SUBCLASS_CHAIN(sub);
1188       } else {
1189         // Worklist overflow.  Do a recursive call.  Should be rare.
1190         // The recursive call will have its own worklist, of course.
1191         // (Note that sub has already been tested, so that there is
1192         // no need for the recursive call to re-test.  That's handy,
1193         // since the recursive call sees sub as the context_type.)
1194         if (do_counts) { NOT_PRODUCT(deps_find_witness_recursions++); }
        Klass* witness = find_witness_anywhere(sub,
                                               participants_hide_witnesses,
                                               /*top_level_call=*/ false);
1198         if (witness != NULL)  return witness;
1199       }
1200     }
1201   }
1202 
1203   // No witness found.  The dependency remains unbroken.
1204   return NULL;
1205 #undef ADD_SUBCLASS_CHAIN
1206 }
1207 
1208 
1209 bool Dependencies::is_concrete_klass(Klass* k) {
1210   if (k->is_abstract())  return false;
1211   // %%% We could treat classes which are concrete but
1212   // have not yet been instantiated as virtually abstract.
1213   // This would require a deoptimization barrier on first instantiation.
1214   //if (k->is_not_instantiated())  return false;
1215   return true;
1216 }
1217 
1218 bool Dependencies::is_concrete_method(Method* m) {
1219   // Statics are irrelevant to virtual call sites.
1220   if (m->is_static())  return false;
1221 
1222   // We could also return false if m does not yet appear to be
1223   // executed, if the VM version supports this distinction also.
1224   // Default methods are considered "concrete" as well.
1225   return !m->is_abstract() &&
1226          !m->is_overpass(); // error functions aren't concrete
1227 }
1228 
1229 
1230 Klass* Dependencies::find_finalizable_subclass(Klass* k) {
1231   if (k->is_interface())  return NULL;
1232   if (k->has_finalizer()) return k;
1233   k = k->subklass();
1234   while (k != NULL) {
1235     Klass* result = find_finalizable_subclass(k);
1236     if (result != NULL) return result;
1237     k = k->next_sibling();
1238   }
1239   return NULL;
1240 }
1241 
1242 
1243 bool Dependencies::is_concrete_klass(ciInstanceKlass* k) {
1244   if (k->is_abstract())  return false;
1245   // We could also return false if k does not yet appear to be
1246   // instantiated, if the VM version supports this distinction also.
1247   //if (k->is_not_instantiated())  return false;
1248   return true;
1249 }
1250 
1251 bool Dependencies::is_concrete_method(ciMethod* m) {
1252   // Statics are irrelevant to virtual call sites.
1253   if (m->is_static())  return false;
1254 
1255   // We could also return false if m does not yet appear to be
1256   // executed, if the VM version supports this distinction also.
1257   return !m->is_abstract();
1258 }
1259 
1260 
1261 bool Dependencies::has_finalizable_subclass(ciInstanceKlass* k) {
1262   return k->has_finalizable_subclass();
1263 }
1264 
1265 
1266 // Any use of the contents (bytecodes) of a method must be
1267 // marked by an "evol_method" dependency, if those contents
1268 // can change.  (Note: A method is always dependent on itself.)
1269 Klass* Dependencies::check_evol_method(Method* m) {
1270   assert(must_be_in_vm(), "raw oops here");
1271   // Did somebody do a JVMTI RedefineClasses while our backs were turned?
  // Or is there now a breakpoint?
1273   // (Assumes compiled code cannot handle bkpts; change if UseFastBreakpoints.)
1274   if (m->is_old()
1275       || m->number_of_breakpoints() > 0) {
1276     return m->method_holder();
1277   } else {
1278     return NULL;
1279   }
1280 }
1281 
1282 // This is a strong assertion:  It is that the given type
1283 // has no subtypes whatever.  It is most useful for
1284 // optimizing checks on reflected types or on array types.
1285 // (Checks on types which are derived from real instances
1286 // can be optimized more strongly than this, because we
1287 // know that the checked type comes from a concrete type,
1288 // and therefore we can disregard abstract types.)
1289 Klass* Dependencies::check_leaf_type(Klass* ctxk) {
1290   assert(must_be_in_vm(), "raw oops here");
1291   assert_locked_or_safepoint(Compile_lock);
1292   InstanceKlass* ctx = InstanceKlass::cast(ctxk);
1293   Klass* sub = ctx->subklass();
1294   if (sub != NULL) {
1295     return sub;
1296   } else if (ctx->nof_implementors() != 0) {
1297     // if it is an interface, it must be unimplemented
1298     // (if it is not an interface, nof_implementors is always zero)
1299     Klass* impl = ctx->implementor();
1300     assert(impl != NULL, "must be set");
1301     return impl;
1302   } else {
1303     return NULL;
1304   }
1305 }
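
// Usage sketch (hedged):  when a new class is defined, the runtime re-checks
// any recorded leaf_type dependencies roughly like
//
//   Klass* witness = Dependencies::check_leaf_type(ctxk);
//   if (witness != NULL) {
//     // ctxk has gained a subtype (or an implementor); deoptimize dependents
//   }
//
// A NULL result means the leaf assumption still holds.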
1306 
1307 // Test the assertion that conck is the only concrete subtype* of ctxk.
// The type conck itself is allowed to have further concrete subtypes.
1309 // This allows the compiler to narrow occurrences of ctxk by conck,
1310 // when dealing with the types of actual instances.
Klass* Dependencies::check_abstract_with_unique_concrete_subtype(Klass* ctxk,
                                                                 Klass* conck,
                                                                 KlassDepChange* changes) {
1314   ClassHierarchyWalker wf(conck);
1315   return wf.find_witness_subtype(ctxk, changes);
1316 }
1317 
1318 // If a non-concrete class has no concrete subtypes, it is not (yet)
1319 // instantiatable.  This can allow the compiler to make some paths go
1320 // dead, if they are gated by a test of the type.
Klass* Dependencies::check_abstract_with_no_concrete_subtype(Klass* ctxk,
                                                             KlassDepChange* changes) {
1323   // Find any concrete subtype, with no participants:
1324   ClassHierarchyWalker wf;
1325   return wf.find_witness_subtype(ctxk, changes);
1326 }
1327 
1328 
1329 // If a concrete class has no concrete subtypes, it can always be
1330 // exactly typed.  This allows the use of a cheaper type test.
Klass* Dependencies::check_concrete_with_no_concrete_subtype(Klass* ctxk,
                                                             KlassDepChange* changes) {
1333   // Find any concrete subtype, with only the ctxk as participant:
1334   ClassHierarchyWalker wf(ctxk);
1335   return wf.find_witness_subtype(ctxk, changes);
1336 }
1337 
1338 
1339 // Find the unique concrete proper subtype of ctxk, or NULL if there
1340 // is more than one concrete proper subtype.  If there are no concrete
1341 // proper subtypes, return ctxk itself, whether it is concrete or not.
// The returned subtype is allowed to have further concrete subtypes.
1343 // That is, return CC1 for CX > CC1 > CC2, but NULL for CX > { CC1, CC2 }.
1344 Klass* Dependencies::find_unique_concrete_subtype(Klass* ctxk) {
1345   ClassHierarchyWalker wf(ctxk);   // Ignore ctxk when walking.
1346   wf.record_witnesses(1);          // Record one other witness when walking.
1347   Klass* wit = wf.find_witness_subtype(ctxk);
1348   if (wit != NULL)  return NULL;   // Too many witnesses.
1349   Klass* conck = wf.participant(0);
1350   if (conck == NULL) {
1351 #ifndef PRODUCT
1352     // Make sure the dependency mechanism will pass this discovery:
1353     if (VerifyDependencies) {
1354       // Turn off dependency tracing while actually testing deps.
1355       FlagSetting fs(TraceDependencies, false);
1356       if (!Dependencies::is_concrete_klass(ctxk)) {
1357         guarantee(NULL ==
1358                   (void *)check_abstract_with_no_concrete_subtype(ctxk),
1359                   "verify dep.");
1360       } else {
1361         guarantee(NULL ==
1362                   (void *)check_concrete_with_no_concrete_subtype(ctxk),
1363                   "verify dep.");
1364       }
1365     }
1366 #endif //PRODUCT
1367     return ctxk;                   // Return ctxk as a flag for "no subtypes".
1368   } else {
1369 #ifndef PRODUCT
1370     // Make sure the dependency mechanism will pass this discovery:
1371     if (VerifyDependencies) {
1372       // Turn off dependency tracing while actually testing deps.
1373       FlagSetting fs(TraceDependencies, false);
1374       if (!Dependencies::is_concrete_klass(ctxk)) {
1375         guarantee(NULL == (void *)
1376                   check_abstract_with_unique_concrete_subtype(ctxk, conck),
1377                   "verify dep.");
1378       }
1379     }
1380 #endif //PRODUCT
1381     return conck;
1382   }
1383 }
1384 
1385 // Test the assertion that the k[12] are the only concrete subtypes of ctxk,
1386 // except possibly for further subtypes of k[12] themselves.
1387 // The context type must be abstract.  The types k1 and k2 are themselves
1388 // allowed to have further concrete subtypes.
1389 Klass* Dependencies::check_abstract_with_exclusive_concrete_subtypes(
1390                                                 Klass* ctxk,
1391                                                 Klass* k1,
1392                                                 Klass* k2,
1393                                                 KlassDepChange* changes) {
1394   ClassHierarchyWalker wf;
1395   wf.add_participant(k1);
1396   wf.add_participant(k2);
1397   return wf.find_witness_subtype(ctxk, changes);
1398 }
1399 
1400 // Search ctxk for concrete implementations.  If there are klen or fewer,
1401 // pack them into the given array and return the number.
1402 // Otherwise, return -1, meaning the given array would overflow.
1403 // (Note that a return of 0 means there are exactly no concrete subtypes.)
1404 // In this search, if ctxk is concrete, it will be reported alone.
1405 // For any type CC reported, no proper subtypes of CC will be reported.
1406 int Dependencies::find_exclusive_concrete_subtypes(Klass* ctxk,
1407                                                    int klen,
1408                                                    Klass* karray[]) {
1409   ClassHierarchyWalker wf;
1410   wf.record_witnesses(klen);
1411   Klass* wit = wf.find_witness_subtype(ctxk);
1412   if (wit != NULL)  return -1;  // Too many witnesses.
1413   int num = wf.num_participants();
1414   assert(num <= klen, "oob");
1415   // Pack the result array with the good news.
1416   for (int i = 0; i < num; i++)
1417     karray[i] = wf.participant(i);
1418 #ifndef PRODUCT
1419   // Make sure the dependency mechanism will pass this discovery:
1420   if (VerifyDependencies) {
1421     // Turn off dependency tracing while actually testing deps.
1422     FlagSetting fs(TraceDependencies, false);
1423     switch (Dependencies::is_concrete_klass(ctxk)? -1: num) {
1424     case -1: // ctxk was itself concrete
1425       guarantee(num == 1 && karray[0] == ctxk, "verify dep.");
1426       break;
1427     case 0:
1428       guarantee(NULL == (void *)check_abstract_with_no_concrete_subtype(ctxk),
1429                 "verify dep.");
1430       break;
1431     case 1:
1432       guarantee(NULL == (void *)
1433                 check_abstract_with_unique_concrete_subtype(ctxk, karray[0]),
1434                 "verify dep.");
1435       break;
1436     case 2:
1437       guarantee(NULL == (void *)
1438                 check_abstract_with_exclusive_concrete_subtypes(ctxk,
1439                                                                 karray[0],
1440                                                                 karray[1]),
1441                 "verify dep.");
1442       break;
1443     default:
1444       ShouldNotReachHere();  // klen > 2 not yet supported
1445     }
1446   }
1447 #endif //PRODUCT
1448   return num;
1449 }
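// A minimal usage sketch (assuming a caller that supplies 'ctxk'); only
// array sizes up to 2 are exercised by the verification code above:
//
//   Klass* karray[2];
//   int n = Dependencies::find_exclusive_concrete_subtypes(ctxk, 2, karray);
//   if (n < 0) {
//     // More than two concrete subtypes; give up on this assertion.
//   } else {
//     // karray[0..n-1] holds every concrete subtype that is not hidden
//     // under another reported one; n == 0 means there are none at all.
//     // n == 2 pairs with the abstract_with_exclusive_concrete_subtypes_2
//     // dependency handled by the DepStream code below.
//   }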
1450 
1451 // If a class (or interface) has a unique concrete method uniqm, return NULL.
1452 // Otherwise, return a class that contains an interfering method.
1453 Klass* Dependencies::check_unique_concrete_method(Klass* ctxk, Method* uniqm,
1454                                                     KlassDepChange* changes) {
1455   // Here is a missing optimization:  If uniqm->is_final(),
1456   // we don't really need to search beneath it for overrides.
1457   // This is probably not important, since we don't use dependencies
1458   // to track final methods.  (They can't be "definalized".)
1459   ClassHierarchyWalker wf(uniqm->method_holder(), uniqm);
1460   return wf.find_witness_definer(ctxk, changes);
1461 }
1462 
1463 // Find the set of all non-abstract methods under ctxk that match m.
1464 // (The method m must be defined or inherited in ctxk.)
1465 // Include m itself in the set, unless it is abstract.
1466 // If this set has exactly one element, return that element.
1467 Method* Dependencies::find_unique_concrete_method(Klass* ctxk, Method* m) {
1468   ClassHierarchyWalker wf(m);
1469   assert(wf.check_method_context(ctxk, m), "proper context");
1470   wf.record_witnesses(1);
1471   Klass* wit = wf.find_witness_definer(ctxk);
1472   if (wit != NULL)  return NULL;  // Too many witnesses.
1473   Method* fm = wf.found_method(0);  // Will be NULL if num_parts == 0.
1474   if (Dependencies::is_concrete_method(m)) {
1475     if (fm == NULL) {
1476       // It turns out that m was always the only implementation.
1477       fm = m;
1478     } else if (fm != m) {
1479       // Two conflicting implementations after all.
1480       // (This can happen if m is inherited into ctxk and fm overrides it.)
1481       return NULL;
1482     }
1483   }
1484 #ifndef PRODUCT
1485   // Make sure the dependency mechanism will pass this discovery:
1486   if (VerifyDependencies && fm != NULL) {
1487     guarantee(NULL == (void *)check_unique_concrete_method(ctxk, fm),
1488               "verify dep.");
1489   }
1490 #endif //PRODUCT
1491   return fm;
1492 }
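// A minimal usage sketch of the CHA-style devirtualization query (assuming
// a caller that supplies the static receiver type 'ctxk' and the statically
// resolved target 'm'):
//
//   Method* target = Dependencies::find_unique_concrete_method(ctxk, m);
//   if (target != NULL) {
//     // Every receiver under ctxk currently dispatches to 'target', so the
//     // call may be bound statically or inlined -- but only together with
//     // a unique_concrete_method dependency, which is re-validated by
//     // check_unique_concrete_method() whenever new classes are loaded.
//   }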
1493 
1494 Klass* Dependencies::check_exclusive_concrete_methods(Klass* ctxk,
1495                                                         Method* m1,
1496                                                         Method* m2,
1497                                                         KlassDepChange* changes) {
1498   ClassHierarchyWalker wf(m1);
1499   wf.add_participant(m1->method_holder());
1500   wf.add_participant(m2->method_holder());
1501   return wf.find_witness_definer(ctxk, changes);
1502 }
1503 
1504 // Find the set of all non-abstract methods under ctxk that match m[0].
1505 // (The method m[0] must be defined or inherited in ctxk.)
1506 // Include m[0] itself in the set, unless it is abstract.
1507 // Fill the given array m[0..(mlen-1)] with this set, and return the length.
1508 // (The length may be zero if no concrete methods are found anywhere.)
1509 // If there are too many concrete methods to fit in marray, return -1.
1510 int Dependencies::find_exclusive_concrete_methods(Klass* ctxk,
1511                                                   int mlen,
1512                                                   Method* marray[]) {
1513   Method* m0 = marray[0];
1514   ClassHierarchyWalker wf(m0);
1515   assert(wf.check_method_context(ctxk, m0), "proper context");
1516   wf.record_witnesses(mlen);
1517   bool participants_hide_witnesses = true;
1518   Klass* wit = wf.find_witness_definer(ctxk);
1519   if (wit != NULL)  return -1;  // Too many witnesses.
1520   int num = wf.num_participants();
1521   assert(num <= mlen, "oob");
1522   // Keep track of whether m0 is also part of the result set.
1523   int mfill = 0;
1524   assert(marray[mfill] == m0, "sanity");
1525   if (Dependencies::is_concrete_method(m0))
1526     mfill++;  // keep m0 as marray[0], the first result
1527   for (int i = 0; i < num; i++) {
1528     Method* fm = wf.found_method(i);
1529     if (fm == m0)  continue;  // Already put this guy in the list.
1530     if (mfill == mlen) {
1531       return -1;              // Oops.  Too many methods after all!
1532     }
1533     marray[mfill++] = fm;
1534   }
1535 #ifndef PRODUCT
1536   // Make sure the dependency mechanism will pass this discovery:
1537   if (VerifyDependencies) {
1538     // Turn off dependency tracing while actually testing deps.
1539     FlagSetting fs(TraceDependencies, false);
1540     switch (mfill) {
1541     case 1:
1542       guarantee(NULL == (void *)check_unique_concrete_method(ctxk, marray[0]),
1543                 "verify dep.");
1544       break;
1545     case 2:
1546       guarantee(NULL == (void *)
1547                 check_exclusive_concrete_methods(ctxk, marray[0], marray[1]),
1548                 "verify dep.");
1549       break;
1550     default:
1551       ShouldNotReachHere();  // mlen > 2 not yet supported
1552     }
1553   }
1554 #endif //PRODUCT
1555   return mfill;
1556 }
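// A minimal usage sketch (assuming a caller with a resolved method 'm0' in
// context 'ctxk'); note that marray[0] must be pre-seeded with m0, as the
// sanity assert above requires:
//
//   Method* marray[2] = { m0, NULL };
//   int n = Dependencies::find_exclusive_concrete_methods(ctxk, 2, marray);
//   if (n == 1) {
//     // marray[0] is the only concrete implementation under ctxk.
//   } else if (n == 2) {
//     // Exactly two implementations; pairs with the
//     // exclusive_concrete_methods_2 dependency handled below.
//   } else if (n < 0) {
//     // Too many implementations (or witnesses); give up.
//   }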
1557 
1558 
1559 Klass* Dependencies::check_has_no_finalizable_subclasses(Klass* ctxk, KlassDepChange* changes) {
1560   Klass* search_at = ctxk;
1561   if (changes != NULL)
1562     search_at = changes->new_type(); // just look at the new bit
1563   return find_finalizable_subclass(search_at);
1564 }
1565 
1566 
1567 Klass* Dependencies::check_call_site_target_value(oop call_site, oop method_handle, CallSiteDepChange* changes) {
1568   assert(call_site    ->is_a(SystemDictionary::CallSite_klass()),     "sanity");
1569   assert(method_handle->is_a(SystemDictionary::MethodHandle_klass()), "sanity");
1570   if (changes == NULL) {
1571     // Validate all CallSites
1572     if (java_lang_invoke_CallSite::target(call_site) != method_handle)
1573       return call_site->klass();  // assertion failed
1574   } else {
1575     // Validate the given CallSite
1576     if (call_site == changes->call_site() && java_lang_invoke_CallSite::target(call_site) != changes->method_handle()) {
1577       assert(method_handle != changes->method_handle(), "must be");
1578       return call_site->klass();  // assertion failed
1579     }
1580   }
1581   return NULL;  // assertion still valid
1582 }
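// Note on the two modes above: with changes == NULL the recorded
// (call_site, method_handle) pair is simply re-validated, which is how the
// dependency is checked when no particular change is in hand; with a
// CallSiteDepChange only the one CallSite whose target is being swapped can
// produce a witness, so unrelated call sites never invalidate the code.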
1583 
1584 
1585 void Dependencies::DepStream::trace_and_log_witness(Klass* witness) {
1586   if (witness != NULL) {
1587     if (TraceDependencies) {
1588       print_dependency(witness, /*verbose=*/ true);
1589     }
1590     // The following is a no-op unless logging is enabled:
1591     log_dependency(witness);
1592   }
1593 }
1594 
1595 
1596 Klass* Dependencies::DepStream::check_klass_dependency(KlassDepChange* changes) {
1597   assert_locked_or_safepoint(Compile_lock);
1598   Dependencies::check_valid_dependency_type(type());
1599 
1600   Klass* witness = NULL;
1601   switch (type()) {
1602   case evol_method:
1603     witness = check_evol_method(method_argument(0));
1604     break;
1605   case leaf_type:
1606     witness = check_leaf_type(context_type());
1607     break;
1608   case abstract_with_unique_concrete_subtype:
1609     witness = check_abstract_with_unique_concrete_subtype(context_type(), type_argument(1), changes);
1610     break;
1611   case abstract_with_no_concrete_subtype:
1612     witness = check_abstract_with_no_concrete_subtype(context_type(), changes);
1613     break;
1614   case concrete_with_no_concrete_subtype:
1615     witness = check_concrete_with_no_concrete_subtype(context_type(), changes);
1616     break;
1617   case unique_concrete_method:
1618     witness = check_unique_concrete_method(context_type(), method_argument(1), changes);
1619     break;
1620   case abstract_with_exclusive_concrete_subtypes_2:
1621     witness = check_abstract_with_exclusive_concrete_subtypes(context_type(), type_argument(1), type_argument(2), changes);
1622     break;
1623   case exclusive_concrete_methods_2:
1624     witness = check_exclusive_concrete_methods(context_type(), method_argument(1), method_argument(2), changes);
1625     break;
1626   case no_finalizable_subclasses:
1627     witness = check_has_no_finalizable_subclasses(context_type(), changes);
1628     break;
1629   default:
1630     witness = NULL;
1631     break;
1632   }
1633   trace_and_log_witness(witness);
1634   return witness;
1635 }
1636 
1637 
1638 Klass* Dependencies::DepStream::check_call_site_dependency(CallSiteDepChange* changes) {
1639   assert_locked_or_safepoint(Compile_lock);
1640   Dependencies::check_valid_dependency_type(type());
1641 
1642   Klass* witness = NULL;
1643   switch (type()) {
1644   case call_site_target_value:
1645     witness = check_call_site_target_value(argument_oop(0), argument_oop(1), changes);
1646     break;
1647   default:
1648     witness = NULL;
1649     break;
1650   }
1651   trace_and_log_witness(witness);
1652   return witness;
1653 }
1654 
1655 
1656 Klass* Dependencies::DepStream::spot_check_dependency_at(DepChange& changes) {
1657   // Handle klass dependency
1658   if (changes.is_klass_change() && changes.as_klass_change()->involves_context(context_type()))
1659     return check_klass_dependency(changes.as_klass_change());
1660 
1661   // Handle CallSite dependency
1662   if (changes.is_call_site_change())
1663     return check_call_site_dependency(changes.as_call_site_change());
1664 
1665   // irrelevant dependency; skip it
1666   return NULL;
1667 }
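// A minimal sketch of the calling pattern (assuming an nmethod 'nm' whose
// recorded dependencies are being re-checked against 'changes'), roughly
// what the nmethod-level dependency check does:
//
//   for (Dependencies::DepStream deps(nm); deps.next(); ) {
//     if (deps.spot_check_dependency_at(changes) != NULL) {
//       // A witness was found: the assumption no longer holds and the
//       // nmethod must be marked for deoptimization.
//       break;
//     }
//   }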
1668 
1669 
1670 void DepChange::print() {
1671   int nsup = 0, nint = 0;
1672   for (ContextStream str(*this); str.next(); ) {
1673     Klass* k = str.klass();
1674     switch (str.change_type()) {
1675     case Change_new_type:
1676       tty->print_cr("  dependee = %s", InstanceKlass::cast(k)->external_name());
1677       break;
1678     case Change_new_sub:
1679       if (!WizardMode) {
1680         ++nsup;
1681       } else {
1682         tty->print_cr("  context super = %s", InstanceKlass::cast(k)->external_name());
1683       }
1684       break;
1685     case Change_new_impl:
1686       if (!WizardMode) {
1687         ++nint;
1688       } else {
1689         tty->print_cr("  context interface = %s", InstanceKlass::cast(k)->external_name());
1690       }
1691       break;
1692     }
1693   }
1694   if (nsup + nint != 0) {
1695     tty->print_cr("  context supers = %d, interfaces = %d", nsup, nint);
1696   }
1697 }
1698 
1699 void DepChange::ContextStream::start() {
1700   Klass* new_type = _changes.is_klass_change() ? _changes.as_klass_change()->new_type() : (Klass*) NULL;
1701   _change_type = (new_type == NULL ? NO_CHANGE : Start_Klass);
1702   _klass = new_type;
1703   _ti_base = NULL;
1704   _ti_index = 0;
1705   _ti_limit = 0;
1706 }
1707 
1708 bool DepChange::ContextStream::next() {
1709   switch (_change_type) {
1710   case Start_Klass:             // initial state; _klass is the new type
1711     _ti_base = InstanceKlass::cast(_klass)->transitive_interfaces();
1712     _ti_index = 0;
1713     _change_type = Change_new_type;
1714     return true;
1715   case Change_new_type:
1716     // fall through:
1717     _change_type = Change_new_sub;
1718   case Change_new_sub:
1719     // 6598190: brackets workaround Sun Studio C++ compiler bug 6629277
1720     {
1721       _klass = InstanceKlass::cast(_klass)->super();
1722       if (_klass != NULL) {
1723         return true;
1724       }
1725     }
1726     // else set up _ti_limit and fall through:
1727     _ti_limit = (_ti_base == NULL) ? 0 : _ti_base->length();
1728     _change_type = Change_new_impl;
1729   case Change_new_impl:
1730     if (_ti_index < _ti_limit) {
1731       _klass = _ti_base->at(_ti_index++);
1732       return true;
1733     }
1734     // fall through:
1735     _change_type = NO_CHANGE;  // iterator is exhausted
1736   case NO_CHANGE:
1737     break;
1738   default:
1739     ShouldNotReachHere();
1740   }
1741   return false;
1742 }
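// Enumeration order note: for a klass change the stream yields the newly
// linked type itself (Change_new_type), then each of its superclasses
// (Change_new_sub), and finally each interface in its transitive interface
// list (Change_new_impl).  For an assumed class C extending B (which
// extends Object) and implementing a single interface I, the stream
// produces C, B, Object, I.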
1743 
1744 void KlassDepChange::initialize() {
1745   // entire transaction must be under this lock:
1746   assert_lock_strong(Compile_lock);
1747 
1748   // Mark the dependee and all of its superclasses.
1749   // Also mark its transitive interfaces.
1750   for (ContextStream str(*this); str.next(); ) {
1751     Klass* d = str.klass();
1752     assert(!InstanceKlass::cast(d)->is_marked_dependent(), "checking");
1753     InstanceKlass::cast(d)->set_is_marked_dependent(true);
1754   }
1755 }
1756 
1757 KlassDepChange::~KlassDepChange() {
1758   // Unmark the dependee and all of its superclasses.
1759   // Also unmark its transitive interfaces.
1760   for (ContextStream str(*this); str.next(); ) {
1761     Klass* d = str.klass();
1762     InstanceKlass::cast(d)->set_is_marked_dependent(false);
1763   }
1764 }
1765 
1766 bool KlassDepChange::involves_context(Klass* k) {
1767   if (k == NULL || !k->oop_is_instance()) {
1768     return false;
1769   }
1770   InstanceKlass* ik = InstanceKlass::cast(k);
1771   bool is_contained = ik->is_marked_dependent();
1772   assert(is_contained == new_type()->is_subtype_of(k),
1773          "correct marking of potential context types");
1774   return is_contained;
1775 }
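// Note: the mark bit set in KlassDepChange::initialize() lets this test
// answer "is k a supertype of the newly added type?" with a single flag
// read instead of a hierarchy walk, which matters when many nmethods'
// dependencies are scanned during a single class load; the assert above
// cross-checks the marking against is_subtype_of().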
1776 
1777 #ifndef PRODUCT
1778 void Dependencies::print_statistics() {
1779   if (deps_find_witness_print != 0) {
1780     // Call one final time, to flush out the data.
1781     deps_find_witness_print = -1;
1782     count_find_witness_calls();
1783   }
1784 }
1785 #endif