1 /*
   2  * Copyright (c) 2005, 2015, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "ci/ciArrayKlass.hpp"
  27 #include "ci/ciEnv.hpp"
  28 #include "ci/ciKlass.hpp"
  29 #include "ci/ciField.hpp"
  30 #include "ci/ciMethod.hpp"
  31 #include "classfile/javaClasses.inline.hpp"
  32 #include "code/dependencies.hpp"
  33 #include "compiler/compileLog.hpp"
  34 #include "oops/oop.inline.hpp"
  35 #include "runtime/handles.hpp"
  36 #include "runtime/handles.inline.hpp"
  37 #include "runtime/perfData.hpp"
  38 #include "runtime/thread.inline.hpp"
  39 #include "utilities/copy.hpp"
  40 
  41 #ifdef ASSERT
  42 static bool must_be_in_vm() {
  43   Thread* thread = Thread::current();
  44   if (thread->is_Java_thread())
  45     return ((JavaThread*)thread)->thread_state() == _thread_in_vm;
  46   else
  47     return true;  //something like this: thread->is_VM_thread();
  48 }
  49 #endif //ASSERT
  50 
  51 // Globals
  52 
  53 PerfCounter*    Dependencies::_perf_dependency_checking_time = NULL;
  54 PerfCounter*    Dependencies::_perf_dependencies_checked_count = NULL;
  55 PerfCounter*    Dependencies::_perf_dependencies_invalidated = NULL;
  56 PerfCounter*    Dependencies::_perf_dependencies_total_count = NULL;
  57 PerfCounter*    Dependencies::_perf_dependencies_context_traversals = NULL;
  58 
  59 void Dependencies::initialize(ciEnv* env) {
  60   Arena* arena = env->arena();
  61   _oop_recorder = env->oop_recorder();
  62   _log = env->log();
  63   _dep_seen = new(arena) GrowableArray<int>(arena, 500, 0, 0);
  64   DEBUG_ONLY(_deps[end_marker] = NULL);
  65   for (int i = (int)FIRST_TYPE; i < (int)TYPE_LIMIT; i++) {
  66     _deps[i] = new(arena) GrowableArray<ciBaseObject*>(arena, 10, 0, 0);
  67   }
  68   _content_bytes = NULL;
  69   _size_in_bytes = (size_t)-1;
  70 
  71   assert(TYPE_LIMIT <= (1<<LG2_TYPE_LIMIT), "sanity");
  72 }
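// A compiler front end typically accumulates assertions through a
// Dependencies object initialized from its ciEnv while it optimizes,
// e.g. (names illustrative):
//   deps->assert_leaf_type(receiver_klass);
//   deps->assert_unique_concrete_method(context_klass, target_method);
// The accumulated set is later encoded into the nmethod via
// encode_content_bytes() and copy_to() below.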
  73 
  74 void Dependencies::assert_evol_method(ciMethod* m) {
  75   assert_common_1(evol_method, m);
  76 }
  77 
  78 void Dependencies::assert_leaf_type(ciKlass* ctxk) {
  79   if (ctxk->is_array_klass()) {
  80     // As a special case, support this assertion on an array type,
  81     // which reduces to an assertion on its element type.
  82     // Note that this cannot be done with assertions that
  83     // relate to concreteness or abstractness.
  84     ciType* elemt = ctxk->as_array_klass()->base_element_type();
  85     if (!elemt->is_instance_klass())  return;   // Ex:  int[][]
  86     ctxk = elemt->as_instance_klass();
  87     //if (ctxk->is_final())  return;            // Ex:  String[][]
  88   }
  89   check_ctxk(ctxk);
  90   assert_common_1(leaf_type, ctxk);
  91 }
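// For example, asserting leaf_type on String[][] reduces to an assertion
// on String itself, while int[][] needs no assertion at all: a primitive
// element type can never acquire subtypes.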
  92 
  93 void Dependencies::assert_abstract_with_unique_concrete_subtype(ciKlass* ctxk, ciKlass* conck) {
  94   check_ctxk_abstract(ctxk);
  95   assert_common_2(abstract_with_unique_concrete_subtype, ctxk, conck);
  96 }
  97 
  98 void Dependencies::assert_abstract_with_no_concrete_subtype(ciKlass* ctxk) {
  99   check_ctxk_abstract(ctxk);
 100   assert_common_1(abstract_with_no_concrete_subtype, ctxk);
 101 }
 102 
 103 void Dependencies::assert_concrete_with_no_concrete_subtype(ciKlass* ctxk) {
 104   check_ctxk_concrete(ctxk);
 105   assert_common_1(concrete_with_no_concrete_subtype, ctxk);
 106 }
 107 
 108 void Dependencies::assert_unique_concrete_method(ciKlass* ctxk, ciMethod* uniqm) {
 109   check_ctxk(ctxk);
 110   assert_common_2(unique_concrete_method, ctxk, uniqm);
 111 }
 112 
 113 void Dependencies::assert_abstract_with_exclusive_concrete_subtypes(ciKlass* ctxk, ciKlass* k1, ciKlass* k2) {
 114   check_ctxk(ctxk);
 115   assert_common_3(abstract_with_exclusive_concrete_subtypes_2, ctxk, k1, k2);
 116 }
 117 
 118 void Dependencies::assert_exclusive_concrete_methods(ciKlass* ctxk, ciMethod* m1, ciMethod* m2) {
 119   check_ctxk(ctxk);
 120   assert_common_3(exclusive_concrete_methods_2, ctxk, m1, m2);
 121 }
 122 
 123 void Dependencies::assert_has_no_finalizable_subclasses(ciKlass* ctxk) {
 124   check_ctxk(ctxk);
 125   assert_common_1(no_finalizable_subclasses, ctxk);
 126 }
 127 
 128 void Dependencies::assert_call_site_target_value(ciCallSite* call_site, ciMethodHandle* method_handle) {
 129   assert_common_2(call_site_target_value, call_site, method_handle);
 130 }
 131 
 132 void Dependencies::assert_constant_field_value_klass(ciField* field, ciKlass* ctxk) {
 133   // FIXME: how to record a field? no metadata associated; offset is int
 134   assert_common_1(constant_field_value_klass, ctxk /*, field*/);
 135 }
 136 
 137 void Dependencies::assert_constant_field_value_instance(ciField* field, ciObject* obj) {
 138   if (field->holder()->set_finals()) {
 139     // FIXME: how to record a field? no metadata associated; offset is int
 140     assert_common_2(constant_field_value_instance, field->holder(), /*field,*/ obj);
 141   } else {
 142     assert_constant_field_value_klass(field, field->holder());
 143   }
 144 }
 145 
// Helper function.  If we are adding a new dep. under ctxk2,
// try to find an old dep. under a broader* ctxk1.  If there is one,
// the old entry already covers the new assertion (or is widened in place
// to ctxk2 so that it does), and the caller need not append a new entry.
 149 bool Dependencies::maybe_merge_ctxk(GrowableArray<ciBaseObject*>* deps,
 150                                     int ctxk_i, ciKlass* ctxk2) {
 151   ciKlass* ctxk1 = deps->at(ctxk_i)->as_metadata()->as_klass();
 152   if (ctxk2->is_subtype_of(ctxk1)) {
 153     return true;  // success, and no need to change
 154   } else if (ctxk1->is_subtype_of(ctxk2)) {
 155     // new context class fully subsumes previous one
 156     deps->at_put(ctxk_i, ctxk2);
 157     return true;
 158   } else {
 159     return false;
 160   }
 161 }
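// For example (names illustrative): if a dep. was recorded under context
// AbstractCollection and the same subject is now asserted under ArrayList,
// a subtype, the old entry already covers it; if it was recorded under
// ArrayList and is now asserted under AbstractCollection, the recorded
// context is widened in place to the broader class.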
 162 
 163 void Dependencies::assert_common_1(DepType dept, ciBaseObject* x) {
 164   assert(dep_args(dept) == 1, "sanity");
 165   log_dependency(dept, x);
 166   GrowableArray<ciBaseObject*>* deps = _deps[dept];
 167 
 168   // see if the same (or a similar) dep is already recorded
 169   if (note_dep_seen(dept, x)) {
 170     assert(deps->find(x) >= 0, "sanity");
 171   } else {
 172     deps->append(x);
 173   }
 174 }
 175 
 176 void Dependencies::assert_common_2(DepType dept,
 177                                    ciBaseObject* x0, ciBaseObject* x1) {
 178   assert(dep_args(dept) == 2, "sanity");
 179   log_dependency(dept, x0, x1);
 180   GrowableArray<ciBaseObject*>* deps = _deps[dept];
 181 
 182   // see if the same (or a similar) dep is already recorded
 183   bool has_ctxk = has_explicit_context_arg(dept);
 184   if (has_ctxk) {
 185     assert(dep_context_arg(dept) == 0, "sanity");
 186     if (note_dep_seen(dept, x1)) {
 187       // look in this bucket for redundant assertions
 188       const int stride = 2;
 189       for (int i = deps->length(); (i -= stride) >= 0; ) {
 190         ciBaseObject* y1 = deps->at(i+1);
 191         if (x1 == y1) {  // same subject; check the context
 192           if (maybe_merge_ctxk(deps, i+0, x0->as_metadata()->as_klass())) {
 193             return;
 194           }
 195         }
 196       }
 197     }
 198   } else {
 199     if (note_dep_seen(dept, x0) && note_dep_seen(dept, x1)) {
 200       // look in this bucket for redundant assertions
 201       const int stride = 2;
 202       for (int i = deps->length(); (i -= stride) >= 0; ) {
 203         ciBaseObject* y0 = deps->at(i+0);
 204         ciBaseObject* y1 = deps->at(i+1);
 205         if (x0 == y0 && x1 == y1) {
 206           return;
 207         }
 208       }
 209     }
 210   }
 211 
 212   // append the assertion in the correct bucket:
 213   deps->append(x0);
 214   deps->append(x1);
 215 }
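// Schematically, a two-argument bucket is a flat array of pairs,
//   [ x0, x1,  x0', x1',  ... ]
// which is why the redundancy scans above walk backwards with a stride of 2.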
 216 
 217 void Dependencies::assert_common_3(DepType dept,
 218                                    ciKlass* ctxk, ciBaseObject* x, ciBaseObject* x2) {
 219   assert(dep_context_arg(dept) == 0, "sanity");
 220   assert(dep_args(dept) == 3, "sanity");
 221   log_dependency(dept, ctxk, x, x2);
 222   GrowableArray<ciBaseObject*>* deps = _deps[dept];
 223 
 224   // try to normalize an unordered pair:
 225   bool swap = false;
 226   switch (dept) {
 227   case abstract_with_exclusive_concrete_subtypes_2:
 228     swap = (x->ident() > x2->ident() && x->as_metadata()->as_klass() != ctxk);
 229     break;
 230   case exclusive_concrete_methods_2:
 231     swap = (x->ident() > x2->ident() && x->as_metadata()->as_method()->holder() != ctxk);
 232     break;
 233   }
 234   if (swap) { ciBaseObject* t = x; x = x2; x2 = t; }
 235 
 236   // see if the same (or a similar) dep is already recorded
 237   if (note_dep_seen(dept, x) && note_dep_seen(dept, x2)) {
 238     // look in this bucket for redundant assertions
 239     const int stride = 3;
 240     for (int i = deps->length(); (i -= stride) >= 0; ) {
 241       ciBaseObject* y  = deps->at(i+1);
 242       ciBaseObject* y2 = deps->at(i+2);
 243       if (x == y && x2 == y2) {  // same subjects; check the context
 244         if (maybe_merge_ctxk(deps, i+0, ctxk)) {
 245           return;
 246         }
 247       }
 248     }
 249   }
 250   // append the assertion in the correct bucket:
 251   deps->append(ctxk);
 252   deps->append(x);
 253   deps->append(x2);
 254 }
 255 
 256 /// Support for encoding dependencies into an nmethod:
 257 
 258 void Dependencies::copy_to(nmethod* nm) {
 259   address beg = nm->dependencies_begin();
 260   address end = nm->dependencies_end();
 261   guarantee(end - beg >= (ptrdiff_t) size_in_bytes(), "bad sizing");
 262   Copy::disjoint_words((HeapWord*) content_bytes(),
 263                        (HeapWord*) beg,
 264                        size_in_bytes() / sizeof(HeapWord));
 265   assert(size_in_bytes() % sizeof(HeapWord) == 0, "copy by words");
 266 }
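// (The word-at-a-time copy is safe because encode_content_bytes() pads the
// stream with end_marker bytes out to a HeapWord boundary.)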
 267 
 268 static int sort_dep(ciBaseObject** p1, ciBaseObject** p2, int narg) {
 269   for (int i = 0; i < narg; i++) {
 270     int diff = p1[i]->ident() - p2[i]->ident();
 271     if (diff != 0)  return diff;
 272   }
 273   return 0;
 274 }
 275 static int sort_dep_arg_1(ciBaseObject** p1, ciBaseObject** p2)
 276 { return sort_dep(p1, p2, 1); }
 277 static int sort_dep_arg_2(ciBaseObject** p1, ciBaseObject** p2)
 278 { return sort_dep(p1, p2, 2); }
 279 static int sort_dep_arg_3(ciBaseObject** p1, ciBaseObject** p2)
 280 { return sort_dep(p1, p2, 3); }
 281 
 282 void Dependencies::sort_all_deps() {
 283   for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
 284     DepType dept = (DepType)deptv;
 285     GrowableArray<ciBaseObject*>* deps = _deps[dept];
 286     if (deps->length() <= 1)  continue;
 287     switch (dep_args(dept)) {
 288     case 1: deps->sort(sort_dep_arg_1, 1); break;
 289     case 2: deps->sort(sort_dep_arg_2, 2); break;
 290     case 3: deps->sort(sort_dep_arg_3, 3); break;
 291     default: ShouldNotReachHere();
 292     }
 293   }
 294 }
 295 
 296 size_t Dependencies::estimate_size_in_bytes() {
 297   size_t est_size = 100;
 298   for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
 299     DepType dept = (DepType)deptv;
 300     GrowableArray<ciBaseObject*>* deps = _deps[dept];
 301     est_size += deps->length()*2;  // tags and argument(s)
 302   }
 303   return est_size;
 304 }
 305 
 306 ciKlass* Dependencies::ctxk_encoded_as_null(DepType dept, ciBaseObject* x) {
 307   switch (dept) {
 308   case abstract_with_exclusive_concrete_subtypes_2:
 309     return x->as_metadata()->as_klass();
 310   case unique_concrete_method:
 311   case exclusive_concrete_methods_2:
 312     return x->as_metadata()->as_method()->holder();
 313   }
 314   return NULL;  // let NULL be NULL
 315 }
 316 
 317 Klass* Dependencies::ctxk_encoded_as_null(DepType dept, Metadata* x) {
 318   assert(must_be_in_vm(), "raw oops here");
 319   switch (dept) {
 320   case abstract_with_exclusive_concrete_subtypes_2:
 321     assert(x->is_klass(), "sanity");
 322     return (Klass*) x;
 323   case unique_concrete_method:
 324   case exclusive_concrete_methods_2:
 325     assert(x->is_method(), "sanity");
 326     return ((Method*)x)->method_holder();
 327   }
 328   return NULL;  // let NULL be NULL
 329 }
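// For example, the common assertion unique_concrete_method(ctxk, m) with
// ctxk == m->method_holder() needs no explicit context in the encoding:
// the encoder below sets default_context_type_bit and the decoder
// reconstructs ctxk from m via this function.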
 330 
 331 void Dependencies::encode_content_bytes() {
 332   sort_all_deps();
 333 
 334   // cast is safe, no deps can overflow INT_MAX
 335   CompressedWriteStream bytes((int)estimate_size_in_bytes());
 336 
 337   for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
 338     DepType dept = (DepType)deptv;
 339     GrowableArray<ciBaseObject*>* deps = _deps[dept];
 340     if (deps->length() == 0)  continue;
 341     int stride = dep_args(dept);
 342     int ctxkj  = dep_context_arg(dept);  // -1 if no context arg
 343     assert(stride > 0, "sanity");
 344     for (int i = 0; i < deps->length(); i += stride) {
 345       jbyte code_byte = (jbyte)dept;
 346       int skipj = -1;
 347       if (ctxkj >= 0 && ctxkj+1 < stride) {
 348         ciKlass*  ctxk = deps->at(i+ctxkj+0)->as_metadata()->as_klass();
 349         ciBaseObject* x     = deps->at(i+ctxkj+1);  // following argument
 350         if (ctxk == ctxk_encoded_as_null(dept, x)) {
 351           skipj = ctxkj;  // we win:  maybe one less oop to keep track of
 352           code_byte |= default_context_type_bit;
 353         }
 354       }
 355       bytes.write_byte(code_byte);
 356       for (int j = 0; j < stride; j++) {
 357         if (j == skipj)  continue;
 358         ciBaseObject* v = deps->at(i+j);
 359         int idx;
 360         if (v->is_object()) {
 361           idx = _oop_recorder->find_index(v->as_object()->constant_encoding());
 362         } else {
 363           ciMetadata* meta = v->as_metadata();
 364           idx = _oop_recorder->find_index(meta->constant_encoding());
 365         }
 366         bytes.write_int(idx);
 367       }
 368     }
 369   }
 370 
 371   // write a sentinel byte to mark the end
 372   bytes.write_byte(end_marker);
 373 
 374   // round it out to a word boundary
 375   while (bytes.position() % sizeof(HeapWord) != 0) {
 376     bytes.write_byte(end_marker);
 377   }
 378 
 379   // check whether the dept byte encoding really works
 380   assert((jbyte)default_context_type_bit != 0, "byte overflow");
 381 
 382   _content_bytes = bytes.buffer();
 383   _size_in_bytes = bytes.position();
 384 }
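// The encoded stream is thus a sequence of records, one per assertion:
//   [ code_byte ] [ arg_index ]...   ...   [ end_marker ] [ padding ]
// where code_byte is the DepType, possibly OR'ed with
// default_context_type_bit, and each arg_index is a compressed int naming
// an entry recorded in the OopRecorder.  DepStream::next() decodes exactly
// this format.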
 385 
 386 
 387 const char* Dependencies::_dep_name[TYPE_LIMIT] = {
 388   "end_marker",
 389   "evol_method",
 390   "leaf_type",
 391   "abstract_with_unique_concrete_subtype",
 392   "abstract_with_no_concrete_subtype",
 393   "concrete_with_no_concrete_subtype",
 394   "unique_concrete_method",
 395   "abstract_with_exclusive_concrete_subtypes_2",
 396   "exclusive_concrete_methods_2",
 397   "no_finalizable_subclasses",
 398   "call_site_target_value",
 399   "constant_field_value_instance",
 400   "constant_field_value_klass"
 401 };
 402 
 403 int Dependencies::_dep_args[TYPE_LIMIT] = {
 404   -1,// end_marker
 405   1, // evol_method m
 406   1, // leaf_type ctxk
 407   2, // abstract_with_unique_concrete_subtype ctxk, k
 408   1, // abstract_with_no_concrete_subtype ctxk
 409   1, // concrete_with_no_concrete_subtype ctxk
 410   2, // unique_concrete_method ctxk, m
  3, // abstract_with_exclusive_concrete_subtypes_2 ctxk, k1, k2
  3, // exclusive_concrete_methods_2 ctxk, m1, m2
 413   1, // no_finalizable_subclasses ctxk
 414   2, // call_site_target_value call_site, method_handle
 415   2, // constant_field_value_instance ctxk oop
 416   1  // constant_field_value_klass ctxk
 417 };
 418 
 419 const char* Dependencies::dep_name(Dependencies::DepType dept) {
 420   if (!dept_in_mask(dept, all_types))  return "?bad-dep?";
 421   return _dep_name[dept];
 422 }
 423 
 424 int Dependencies::dep_args(Dependencies::DepType dept) {
 425   if (!dept_in_mask(dept, all_types))  return -1;
 426   return _dep_args[dept];
 427 }
 428 
 429 void Dependencies::check_valid_dependency_type(DepType dept) {
 430   guarantee(FIRST_TYPE <= dept && dept < TYPE_LIMIT, err_msg("invalid dependency type: %d", (int) dept));
 431 }
 432 
 433 // for the sake of the compiler log, print out current dependencies:
 434 void Dependencies::log_all_dependencies() {
 435   if (log() == NULL)  return;
 436   ResourceMark rm;
 437   for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
 438     DepType dept = (DepType)deptv;
 439     GrowableArray<ciBaseObject*>* deps = _deps[dept];
 440     int deplen = deps->length();
 441     if (deplen == 0) {
 442       continue;
 443     }
 444     int stride = dep_args(dept);
 445     GrowableArray<ciBaseObject*>* ciargs = new GrowableArray<ciBaseObject*>(stride);
 446     for (int i = 0; i < deps->length(); i += stride) {
 447       for (int j = 0; j < stride; j++) {
 448         // flush out the identities before printing
 449         ciargs->push(deps->at(i+j));
 450       }
 451       write_dependency_to(log(), dept, ciargs);
 452       ciargs->clear();
 453     }
    guarantee(deplen == deps->length(), "deps array cannot grow inside nested ResourceMark scope");
 455   }
 456 }
 457 
 458 void Dependencies::write_dependency_to(CompileLog* log,
 459                                        DepType dept,
 460                                        GrowableArray<DepArgument>* args,
 461                                        Klass* witness) {
 462   if (log == NULL) {
 463     return;
 464   }
 465   ResourceMark rm;
 466   ciEnv* env = ciEnv::current();
 467   GrowableArray<ciBaseObject*>* ciargs = new GrowableArray<ciBaseObject*>(args->length());
 468   for (GrowableArrayIterator<DepArgument> it = args->begin(); it != args->end(); ++it) {
 469     DepArgument arg = *it;
 470     if (arg.is_oop()) {
 471       ciargs->push(env->get_object(arg.oop_value()));
 472     } else {
 473       ciargs->push(env->get_metadata(arg.metadata_value()));
 474     }
 475   }
 476   int argslen = ciargs->length();
 477   Dependencies::write_dependency_to(log, dept, ciargs, witness);
  guarantee(argslen == ciargs->length(), "ciargs array cannot grow inside nested ResourceMark scope");
 479 }
 480 
 481 void Dependencies::write_dependency_to(CompileLog* log,
 482                                        DepType dept,
 483                                        GrowableArray<ciBaseObject*>* args,
 484                                        Klass* witness) {
 485   if (log == NULL) {
 486     return;
 487   }
 488   ResourceMark rm;
 489   GrowableArray<int>* argids = new GrowableArray<int>(args->length());
 490   for (GrowableArrayIterator<ciBaseObject*> it = args->begin(); it != args->end(); ++it) {
 491     ciBaseObject* obj = *it;
 492     if (obj->is_object()) {
 493       argids->push(log->identify(obj->as_object()));
 494     } else {
 495       argids->push(log->identify(obj->as_metadata()));
 496     }
 497   }
 498   if (witness != NULL) {
 499     log->begin_elem("dependency_failed");
 500   } else {
 501     log->begin_elem("dependency");
 502   }
 503   log->print(" type='%s'", dep_name(dept));
 504   const int ctxkj = dep_context_arg(dept);  // -1 if no context arg
 505   if (ctxkj >= 0 && ctxkj < argids->length()) {
 506     log->print(" ctxk='%d'", argids->at(ctxkj));
 507   }
 508   // write remaining arguments, if any.
 509   for (int j = 0; j < argids->length(); j++) {
 510     if (j == ctxkj)  continue;  // already logged
 511     if (j == 1) {
 512       log->print(  " x='%d'",    argids->at(j));
 513     } else {
 514       log->print(" x%d='%d'", j, argids->at(j));
 515     }
 516   }
 517   if (witness != NULL) {
 518     log->object("witness", witness);
 519     log->stamp();
 520   }
 521   log->end_elem();
 522 }
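// In the compile log a recorded assertion therefore appears roughly as
//   <dependency type='unique_concrete_method' ctxk='23' x='57'/>
// where the numeric values are CompileLog object ids (shown here only as
// an illustration).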
 523 
 524 void Dependencies::write_dependency_to(xmlStream* xtty,
 525                                        DepType dept,
 526                                        GrowableArray<DepArgument>* args,
 527                                        Klass* witness) {
 528   if (xtty == NULL) {
 529     return;
 530   }
 531   ResourceMark rm;
 532   ttyLocker ttyl;
 533   int ctxkj = dep_context_arg(dept);  // -1 if no context arg
 534   if (witness != NULL) {
 535     xtty->begin_elem("dependency_failed");
 536   } else {
 537     xtty->begin_elem("dependency");
 538   }
 539   xtty->print(" type='%s'", dep_name(dept));
 540   if (ctxkj >= 0) {
 541     xtty->object("ctxk", args->at(ctxkj).metadata_value());
 542   }
 543   // write remaining arguments, if any.
 544   for (int j = 0; j < args->length(); j++) {
 545     if (j == ctxkj)  continue;  // already logged
 546     DepArgument arg = args->at(j);
 547     if (j == 1) {
 548       if (arg.is_oop()) {
 549         xtty->object("x", arg.oop_value());
 550       } else {
 551         xtty->object("x", arg.metadata_value());
 552       }
 553     } else {
 554       char xn[10]; sprintf(xn, "x%d", j);
 555       if (arg.is_oop()) {
 556         xtty->object(xn, arg.oop_value());
 557       } else {
 558         xtty->object(xn, arg.metadata_value());
 559       }
 560     }
 561   }
 562   if (witness != NULL) {
 563     xtty->object("witness", witness);
 564     xtty->stamp();
 565   }
 566   xtty->end_elem();
 567 }
 568 
 569 void Dependencies::print_dependency(DepType dept, GrowableArray<DepArgument>* args,
 570                                     Klass* witness) {
 571   ResourceMark rm;
 572   ttyLocker ttyl;   // keep the following output all in one block
 573   tty->print_cr("%s of type %s",
 574                 (witness == NULL)? "Dependency": "Failed dependency",
 575                 dep_name(dept));
 576   // print arguments
 577   int ctxkj = dep_context_arg(dept);  // -1 if no context arg
 578   for (int j = 0; j < args->length(); j++) {
 579     DepArgument arg = args->at(j);
 580     bool put_star = false;
 581     if (arg.is_null())  continue;
 582     const char* what;
 583     if (j == ctxkj) {
 584       assert(arg.is_metadata(), "must be");
 585       what = "context";
 586       put_star = !Dependencies::is_concrete_klass((Klass*)arg.metadata_value());
 587     } else if (arg.is_method()) {
 588       what = "method ";
 589       put_star = !Dependencies::is_concrete_method((Method*)arg.metadata_value(), NULL);
 590     } else if (arg.is_klass()) {
 591       what = "class  ";
 592     } else {
 593       what = "object ";
 594     }
 595     tty->print("  %s = %s", what, (put_star? "*": ""));
 596     if (arg.is_klass()) {
 597       tty->print("%s", ((Klass*)arg.metadata_value())->external_name());
 598     } else if (arg.is_method()) {
 599       ((Method*)arg.metadata_value())->print_value();
 600     } else if (arg.is_oop()) {
 601       arg.oop_value()->print_value_on(tty);
 602     } else {
 603       ShouldNotReachHere(); // Provide impl for this type.
 604     }
 605 
 606     tty->cr();
 607   }
 608   if (witness != NULL) {
 609     bool put_star = !Dependencies::is_concrete_klass(witness);
 610     tty->print_cr("  witness = %s%s",
 611                   (put_star? "*": ""),
 612                   witness->external_name());
 613   }
 614 }
 615 
 616 void Dependencies::DepStream::log_dependency(Klass* witness) {
 617   if (_deps == NULL && xtty == NULL)  return;  // fast cutout for runtime
 618   ResourceMark rm;
 619   const int nargs = argument_count();
 620   GrowableArray<DepArgument>* args = new GrowableArray<DepArgument>(nargs);
 621   for (int j = 0; j < nargs; j++) {
 622     if (is_oop_argument(j)) {
 623       args->push(argument_oop(j));
 624     } else {
 625       args->push(argument(j));
 626     }
 627   }
 628   int argslen = args->length();
 629   if (_deps != NULL && _deps->log() != NULL) {
 630     Dependencies::write_dependency_to(_deps->log(), type(), args, witness);
 631   } else {
 632     Dependencies::write_dependency_to(xtty, type(), args, witness);
 633   }
  guarantee(argslen == args->length(), "args array cannot grow inside nested ResourceMark scope");
 635 }
 636 
 637 void Dependencies::DepStream::print_dependency(Klass* witness, bool verbose) {
 638   ResourceMark rm;
 639   int nargs = argument_count();
 640   GrowableArray<DepArgument>* args = new GrowableArray<DepArgument>(nargs);
 641   for (int j = 0; j < nargs; j++) {
 642     if (is_oop_argument(j)) {
 643       args->push(argument_oop(j));
 644     } else {
 645       args->push(argument(j));
 646     }
 647   }
 648   int argslen = args->length();
 649   Dependencies::print_dependency(type(), args, witness);
 650   if (verbose) {
 651     if (_code != NULL) {
 652       tty->print("  code: ");
 653       _code->print_value_on(tty);
 654       tty->cr();
 655     }
 656   }
  guarantee(argslen == args->length(), "args array cannot grow inside nested ResourceMark scope");
 658 }
 659 
 660 
 661 /// Dependency stream support (decodes dependencies from an nmethod):
 662 
 663 #ifdef ASSERT
 664 void Dependencies::DepStream::initial_asserts(size_t byte_limit) {
 665   assert(must_be_in_vm(), "raw oops here");
 666   _byte_limit = byte_limit;
 667   _type       = (DepType)(end_marker-1);  // defeat "already at end" assert
 668   assert((_code!=NULL) + (_deps!=NULL) == 1, "one or t'other");
 669 }
 670 #endif //ASSERT
 671 
 672 bool Dependencies::DepStream::next() {
 673   assert(_type != end_marker, "already at end");
 674   if (_bytes.position() == 0 && _code != NULL
 675       && _code->dependencies_size() == 0) {
 676     // Method has no dependencies at all.
 677     return false;
 678   }
 679   int code_byte = (_bytes.read_byte() & 0xFF);
 680   if (code_byte == end_marker) {
 681     DEBUG_ONLY(_type = end_marker);
 682     return false;
 683   } else {
 684     int ctxk_bit = (code_byte & Dependencies::default_context_type_bit);
 685     code_byte -= ctxk_bit;
 686     DepType dept = (DepType)code_byte;
 687     _type = dept;
 688     Dependencies::check_valid_dependency_type(dept);
 689     int stride = _dep_args[dept];
 690     assert(stride == dep_args(dept), "sanity");
 691     int skipj = -1;
 692     if (ctxk_bit != 0) {
 693       skipj = 0;  // currently the only context argument is at zero
 694       assert(skipj == dep_context_arg(dept), "zero arg always ctxk");
 695     }
 696     for (int j = 0; j < stride; j++) {
 697       _xi[j] = (j == skipj)? 0: _bytes.read_int();
 698     }
 699     DEBUG_ONLY(_xi[stride] = -1);   // help detect overruns
 700     return true;
 701   }
 702 }
 703 
 704 inline Metadata* Dependencies::DepStream::recorded_metadata_at(int i) {
 705   Metadata* o = NULL;
 706   if (_code != NULL) {
 707     o = _code->metadata_at(i);
 708   } else {
 709     o = _deps->oop_recorder()->metadata_at(i);
 710   }
 711   return o;
 712 }
 713 
 714 inline oop Dependencies::DepStream::recorded_oop_at(int i) {
 715   return (_code != NULL)
 716          ? _code->oop_at(i)
 717     : JNIHandles::resolve(_deps->oop_recorder()->oop_at(i));
 718 }
 719 
 720 Metadata* Dependencies::DepStream::argument(int i) {
 721   Metadata* result = recorded_metadata_at(argument_index(i));
 722 
 723   if (result == NULL) { // Explicit context argument can be compressed
 724     int ctxkj = dep_context_arg(type());  // -1 if no explicit context arg
 725     if (ctxkj >= 0 && i == ctxkj && ctxkj+1 < argument_count()) {
 726       result = ctxk_encoded_as_null(type(), argument(ctxkj+1));
 727     }
 728   }
 729 
 730   assert(result == NULL || result->is_klass() || result->is_method(), "must be");
 731   return result;
 732 }
 733 
 734 /**
 735  * Returns a unique identifier for each dependency argument.
 736  */
 737 uintptr_t Dependencies::DepStream::get_identifier(int i) {
 738   if (is_oop_argument(i)) {
 739     return (uintptr_t)(oopDesc*)argument_oop(i);
 740   } else {
 741     return (uintptr_t)argument(i);
 742   }
 743 }
 744 
 745 oop Dependencies::DepStream::argument_oop(int i) {
 746   oop result = recorded_oop_at(argument_index(i));
 747   assert(result == NULL || result->is_oop(), "must be");
 748   return result;
 749 }
 750 
 751 Klass* Dependencies::DepStream::context_type() {
 752   assert(must_be_in_vm(), "raw oops here");
 753 
 754   // Most dependencies have an explicit context type argument.
 755   {
 756     int ctxkj = dep_context_arg(type());  // -1 if no explicit context arg
 757     if (ctxkj >= 0) {
 758       Metadata* k = argument(ctxkj);
 759       assert(k != NULL && k->is_klass(), "type check");
 760       return (Klass*)k;
 761     }
 762   }
 763 
 764   // Some dependencies are using the klass of the first object
 765   // argument as implicit context type.
 766   {
 767     int ctxkj = dep_implicit_context_arg(type());
 768     if (ctxkj >= 0) {
 769       Klass* k = argument_oop(ctxkj)->klass();
 770       assert(k != NULL && k->is_klass(), "type check");
 771       return (Klass*) k;
 772     }
 773   }
 774 
 775   // And some dependencies don't have a context type at all,
 776   // e.g. evol_method.
 777   return NULL;
 778 }
 779 
 780 // ----------------- DependencySignature --------------------------------------
 781 bool DependencySignature::equals(DependencySignature const& s1, DependencySignature const& s2) {
 782   if ((s1.type() != s2.type()) || (s1.args_count() != s2.args_count())) {
 783     return false;
 784   }
 785 
 786   for (int i = 0; i < s1.args_count(); i++) {
 787     if (s1.arg(i) != s2.arg(i)) {
 788       return false;
 789     }
 790   }
 791   return true;
 792 }
 793 
 794 /// Checking dependencies:
 795 
 796 // This hierarchy walker inspects subtypes of a given type,
 797 // trying to find a "bad" class which breaks a dependency.
 798 // Such a class is called a "witness" to the broken dependency.
 799 // While searching around, we ignore "participants", which
 800 // are already known to the dependency.
 801 class ClassHierarchyWalker {
 802  public:
 803   enum { PARTICIPANT_LIMIT = 3 };
 804 
 805  private:
 806   // optional method descriptor to check for:
 807   Symbol* _name;
 808   Symbol* _signature;
 809 
 810   // special classes which are not allowed to be witnesses:
 811   Klass*    _participants[PARTICIPANT_LIMIT+1];
 812   int       _num_participants;
 813 
 814   // cache of method lookups
 815   Method* _found_methods[PARTICIPANT_LIMIT+1];
 816 
 817   // if non-zero, tells how many witnesses to convert to participants
 818   int       _record_witnesses;
 819 
 820   void initialize(Klass* participant) {
 821     _record_witnesses = 0;
 822     _participants[0]  = participant;
 823     _found_methods[0] = NULL;
 824     _num_participants = 0;
 825     if (participant != NULL) {
 826       // Terminating NULL.
 827       _participants[1] = NULL;
 828       _found_methods[1] = NULL;
 829       _num_participants = 1;
 830     }
 831   }
 832 
 833   void initialize_from_method(Method* m) {
 834     assert(m != NULL && m->is_method(), "sanity");
 835     _name      = m->name();
 836     _signature = m->signature();
 837   }
 838 
 839  public:
 840   // The walker is initialized to recognize certain methods and/or types
 841   // as friendly participants.
 842   ClassHierarchyWalker(Klass* participant, Method* m) {
 843     initialize_from_method(m);
 844     initialize(participant);
 845   }
 846   ClassHierarchyWalker(Method* m) {
 847     initialize_from_method(m);
 848     initialize(NULL);
 849   }
 850   ClassHierarchyWalker(Klass* participant = NULL) {
 851     _name      = NULL;
 852     _signature = NULL;
 853     initialize(participant);
 854   }
 855 
 856   // This is common code for two searches:  One for concrete subtypes,
 857   // the other for concrete method implementations and overrides.
 858   bool doing_subtype_search() {
 859     return _name == NULL;
 860   }
 861 
 862   int num_participants() { return _num_participants; }
 863   Klass* participant(int n) {
 864     assert((uint)n <= (uint)_num_participants, "oob");
 865     return _participants[n];
 866   }
 867 
 868   // Note:  If n==num_participants, returns NULL.
 869   Method* found_method(int n) {
 870     assert((uint)n <= (uint)_num_participants, "oob");
 871     Method* fm = _found_methods[n];
 872     assert(n == _num_participants || fm != NULL, "proper usage");
 873     if (fm != NULL && fm->method_holder() != _participants[n]) {
 874       // Default methods from interfaces can be added to classes. In
 875       // that case the holder of the method is not the class but the
 876       // interface where it's defined.
 877       assert(fm->is_default_method(), "sanity");
 878       return NULL;
 879     }
 880     return fm;
 881   }
 882 
 883 #ifdef ASSERT
 884   // Assert that m is inherited into ctxk, without intervening overrides.
 885   // (May return true even if this is not true, in corner cases where we punt.)
 886   bool check_method_context(Klass* ctxk, Method* m) {
 887     if (m->method_holder() == ctxk)
 888       return true;  // Quick win.
 889     if (m->is_private())
 890       return false; // Quick lose.  Should not happen.
 891     if (!(m->is_public() || m->is_protected()))
 892       // The override story is complex when packages get involved.
 893       return true;  // Must punt the assertion to true.
 894     Klass* k = ctxk;
 895     Method* lm = k->lookup_method(m->name(), m->signature());
 896     if (lm == NULL && k->oop_is_instance()) {
 897       // It might be an interface method
      lm = ((InstanceKlass*)k)->lookup_method_in_ordered_interfaces(m->name(),
                                                                    m->signature());
 900     }
 901     if (lm == m)
 902       // Method m is inherited into ctxk.
 903       return true;
 904     if (lm != NULL) {
 905       if (!(lm->is_public() || lm->is_protected())) {
 906         // Method is [package-]private, so the override story is complex.
 907         return true;  // Must punt the assertion to true.
 908       }
 909       if (lm->is_static()) {
 910         // Static methods don't override non-static so punt
 911         return true;
 912       }
 913       if (   !Dependencies::is_concrete_method(lm, k)
 914           && !Dependencies::is_concrete_method(m, ctxk)
 915           && lm->method_holder()->is_subtype_of(m->method_holder()))
 916         // Method m is overridden by lm, but both are non-concrete.
 917         return true;
 918     }
 919     ResourceMark rm;
 920     tty->print_cr("Dependency method not found in the associated context:");
 921     tty->print_cr("  context = %s", ctxk->external_name());
 922     tty->print(   "  method = "); m->print_short_name(tty); tty->cr();
 923     if (lm != NULL) {
 924       tty->print( "  found = "); lm->print_short_name(tty); tty->cr();
 925     }
 926     return false;
 927   }
 928 #endif
 929 
 930   void add_participant(Klass* participant) {
 931     assert(_num_participants + _record_witnesses < PARTICIPANT_LIMIT, "oob");
 932     int np = _num_participants++;
 933     _participants[np] = participant;
 934     _participants[np+1] = NULL;
 935     _found_methods[np+1] = NULL;
 936   }
 937 
 938   void record_witnesses(int add) {
 939     if (add > PARTICIPANT_LIMIT)  add = PARTICIPANT_LIMIT;
 940     assert(_num_participants + add < PARTICIPANT_LIMIT, "oob");
 941     _record_witnesses = add;
 942   }
 943 
 944   bool is_witness(Klass* k) {
 945     if (doing_subtype_search()) {
 946       return Dependencies::is_concrete_klass(k);
 947     } else if (!k->oop_is_instance()) {
 948       return false; // no methods to find in an array type
 949     } else {
 950       // Search class hierarchy first.
 951       Method* m = InstanceKlass::cast(k)->find_instance_method(_name, _signature);
 952       if (!Dependencies::is_concrete_method(m, k)) {
 953         // Check interface defaults also, if any exist.
 954         Array<Method*>* default_methods = InstanceKlass::cast(k)->default_methods();
 955         if (default_methods == NULL)
 956             return false;
 957         m = InstanceKlass::cast(k)->find_method(default_methods, _name, _signature);
 958         if (!Dependencies::is_concrete_method(m, NULL))
 959             return false;
 960       }
 961       _found_methods[_num_participants] = m;
 962       // Note:  If add_participant(k) is called,
 963       // the method m will already be memoized for it.
 964       return true;
 965     }
 966   }
 967 
 968   bool is_participant(Klass* k) {
 969     if (k == _participants[0]) {
 970       return true;
 971     } else if (_num_participants <= 1) {
 972       return false;
 973     } else {
 974       return in_list(k, &_participants[1]);
 975     }
 976   }
 977   bool ignore_witness(Klass* witness) {
 978     if (_record_witnesses == 0) {
 979       return false;
 980     } else {
 981       --_record_witnesses;
 982       add_participant(witness);
 983       return true;
 984     }
 985   }
 986   static bool in_list(Klass* x, Klass** list) {
 987     for (int i = 0; ; i++) {
 988       Klass* y = list[i];
 989       if (y == NULL)  break;
 990       if (y == x)  return true;
 991     }
 992     return false;  // not in list
 993   }
 994 
 995  private:
 996   // the actual search method:
 997   Klass* find_witness_anywhere(Klass* context_type,
 998                                  bool participants_hide_witnesses,
 999                                  bool top_level_call = true);
1000   // the spot-checking version:
1001   Klass* find_witness_in(KlassDepChange& changes,
1002                          Klass* context_type,
1003                            bool participants_hide_witnesses);
1004  public:
1005   Klass* find_witness_subtype(Klass* context_type, KlassDepChange* changes = NULL) {
1006     assert(doing_subtype_search(), "must set up a subtype search");
1007     // When looking for unexpected concrete types,
1008     // do not look beneath expected ones.
1009     const bool participants_hide_witnesses = true;
1010     // CX > CC > C' is OK, even if C' is new.
1011     // CX > { CC,  C' } is not OK if C' is new, and C' is the witness.
1012     if (changes != NULL) {
1013       return find_witness_in(*changes, context_type, participants_hide_witnesses);
1014     } else {
1015       return find_witness_anywhere(context_type, participants_hide_witnesses);
1016     }
1017   }
1018   Klass* find_witness_definer(Klass* context_type, KlassDepChange* changes = NULL) {
1019     assert(!doing_subtype_search(), "must set up a method definer search");
1020     // When looking for unexpected concrete methods,
1021     // look beneath expected ones, to see if there are overrides.
1022     const bool participants_hide_witnesses = true;
1023     // CX.m > CC.m > C'.m is not OK, if C'.m is new, and C' is the witness.
1024     if (changes != NULL) {
1025       return find_witness_in(*changes, context_type, !participants_hide_witnesses);
1026     } else {
1027       return find_witness_anywhere(context_type, !participants_hide_witnesses);
1028     }
1029   }
1030 };
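// Typical usage, as in the checking functions further below
// (names illustrative):
//   ClassHierarchyWalker wf(expected_subtype);               // participant
//   Klass* witness = wf.find_witness_subtype(ctxk, changes);
//   // NULL means the assertion still holds; otherwise 'witness' breaks it.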
1031 
1032 #ifndef PRODUCT
1033 static int deps_find_witness_calls = 0;
1034 static int deps_find_witness_steps = 0;
1035 static int deps_find_witness_recursions = 0;
1036 static int deps_find_witness_singles = 0;
1037 static int deps_find_witness_print = 0; // set to -1 to force a final print
1038 static bool count_find_witness_calls() {
1039   if (TraceDependencies || LogCompilation) {
1040     int pcount = deps_find_witness_print + 1;
1041     bool final_stats      = (pcount == 0);
1042     bool initial_call     = (pcount == 1);
1043     bool occasional_print = ((pcount & ((1<<10) - 1)) == 0);
1044     if (pcount < 0)  pcount = 1; // crude overflow protection
1045     deps_find_witness_print = pcount;
1046     if (VerifyDependencies && initial_call) {
1047       tty->print_cr("Warning:  TraceDependencies results may be inflated by VerifyDependencies");
1048     }
1049     if (occasional_print || final_stats) {
1050       // Every now and then dump a little info about dependency searching.
1051       if (xtty != NULL) {
1052        ttyLocker ttyl;
1053        xtty->elem("deps_find_witness calls='%d' steps='%d' recursions='%d' singles='%d'",
1054                    deps_find_witness_calls,
1055                    deps_find_witness_steps,
1056                    deps_find_witness_recursions,
1057                    deps_find_witness_singles);
1058       }
1059       if (final_stats || (TraceDependencies && WizardMode)) {
1060         ttyLocker ttyl;
1061         tty->print_cr("Dependency check (find_witness) "
1062                       "calls=%d, steps=%d (avg=%.1f), recursions=%d, singles=%d",
1063                       deps_find_witness_calls,
1064                       deps_find_witness_steps,
1065                       (double)deps_find_witness_steps / deps_find_witness_calls,
1066                       deps_find_witness_recursions,
1067                       deps_find_witness_singles);
1068       }
1069     }
1070     return true;
1071   }
1072   return false;
1073 }
1074 #else
1075 #define count_find_witness_calls() (0)
1076 #endif //PRODUCT
1077 
1078 
1079 Klass* ClassHierarchyWalker::find_witness_in(KlassDepChange& changes,
1080                                                Klass* context_type,
1081                                                bool participants_hide_witnesses) {
1082   assert(changes.involves_context(context_type), "irrelevant dependency");
1083   Klass* new_type = changes.new_type();
1084 
1085   (void)count_find_witness_calls();
1086   NOT_PRODUCT(deps_find_witness_singles++);
1087 
1088   // Current thread must be in VM (not native mode, as in CI):
1089   assert(must_be_in_vm(), "raw oops here");
1090   // Must not move the class hierarchy during this check:
1091   assert_locked_or_safepoint(Compile_lock);
1092 
1093   int nof_impls = InstanceKlass::cast(context_type)->nof_implementors();
1094   if (nof_impls > 1) {
1095     // Avoid this case: *I.m > { A.m, C }; B.m > C
1096     // %%% Until this is fixed more systematically, bail out.
1097     // See corresponding comment in find_witness_anywhere.
1098     return context_type;
1099   }
1100 
1101   assert(!is_participant(new_type), "only old classes are participants");
1102   if (participants_hide_witnesses) {
1103     // If the new type is a subtype of a participant, we are done.
1104     for (int i = 0; i < num_participants(); i++) {
1105       Klass* part = participant(i);
1106       if (part == NULL)  continue;
1107       assert(changes.involves_context(part) == new_type->is_subtype_of(part),
1108              "correct marking of participants, b/c new_type is unique");
1109       if (changes.involves_context(part)) {
1110         // new guy is protected from this check by previous participant
1111         return NULL;
1112       }
1113     }
1114   }
1115 
1116   if (is_witness(new_type) &&
1117       !ignore_witness(new_type)) {
1118     return new_type;
1119   }
1120 
1121   return NULL;
1122 }
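// In other words, when the hierarchy changes by exactly one newly loaded
// class, only that new type can freshly violate the assertion, so the
// spot check above examines just that type (modulo the bailout for
// interfaces with multiple implementors) instead of walking the whole
// subtree as find_witness_anywhere() must.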
1123 
1124 
1125 // Walk hierarchy under a context type, looking for unexpected types.
1126 // Do not report participant types, and recursively walk beneath
1127 // them only if participants_hide_witnesses is false.
1128 // If top_level_call is false, skip testing the context type,
1129 // because the caller has already considered it.
1130 Klass* ClassHierarchyWalker::find_witness_anywhere(Klass* context_type,
1131                                                      bool participants_hide_witnesses,
1132                                                      bool top_level_call) {
1133   // Current thread must be in VM (not native mode, as in CI):
1134   assert(must_be_in_vm(), "raw oops here");
1135   // Must not move the class hierarchy during this check:
1136   assert_locked_or_safepoint(Compile_lock);
1137 
1138   bool do_counts = count_find_witness_calls();
1139 
1140   // Check the root of the sub-hierarchy first.
1141   if (top_level_call) {
1142     if (do_counts) {
1143       NOT_PRODUCT(deps_find_witness_calls++);
1144       NOT_PRODUCT(deps_find_witness_steps++);
1145     }
1146     if (is_participant(context_type)) {
1147       if (participants_hide_witnesses)  return NULL;
1148       // else fall through to search loop...
1149     } else if (is_witness(context_type) && !ignore_witness(context_type)) {
1150       // The context is an abstract class or interface, to start with.
1151       return context_type;
1152     }
1153   }
1154 
1155   // Now we must check each implementor and each subclass.
1156   // Use a short worklist to avoid blowing the stack.
1157   // Each worklist entry is a *chain* of subklass siblings to process.
1158   const int CHAINMAX = 100;  // >= 1 + InstanceKlass::implementors_limit
1159   Klass* chains[CHAINMAX];
1160   int    chaini = 0;  // index into worklist
1161   Klass* chain;       // scratch variable
1162 #define ADD_SUBCLASS_CHAIN(k)                     {  \
1163     assert(chaini < CHAINMAX, "oob");                \
1164     chain = k->subklass();                           \
1165     if (chain != NULL)  chains[chaini++] = chain;    }
1166 
1167   // Look for non-abstract subclasses.
1168   // (Note:  Interfaces do not have subclasses.)
1169   ADD_SUBCLASS_CHAIN(context_type);
1170 
1171   // If it is an interface, search its direct implementors.
1172   // (Their subclasses are additional indirect implementors.
1173   // See InstanceKlass::add_implementor.)
1174   // (Note:  nof_implementors is always zero for non-interfaces.)
1175   if (top_level_call) {
1176     int nof_impls = InstanceKlass::cast(context_type)->nof_implementors();
1177     if (nof_impls > 1) {
1178       // Avoid this case: *I.m > { A.m, C }; B.m > C
1179       // Here, I.m has 2 concrete implementations, but m appears unique
1180       // as A.m, because the search misses B.m when checking C.
1181       // The inherited method B.m was getting missed by the walker
1182       // when interface 'I' was the starting point.
1183       // %%% Until this is fixed more systematically, bail out.
1184       // (Old CHA had the same limitation.)
1185       return context_type;
1186     }
1187     if (nof_impls > 0) {
1188       Klass* impl = InstanceKlass::cast(context_type)->implementor();
1189       assert(impl != NULL, "just checking");
      // If impl is the same as the context_type, then more than one
      // implementor has been seen.  No exact info in this case.
1192       if (impl == context_type) {
1193         return context_type;  // report an inexact witness to this sad affair
1194       }
1195       if (do_counts)
1196         { NOT_PRODUCT(deps_find_witness_steps++); }
1197       if (is_participant(impl)) {
1198         if (!participants_hide_witnesses) {
1199           ADD_SUBCLASS_CHAIN(impl);
1200         }
1201       } else if (is_witness(impl) && !ignore_witness(impl)) {
1202         return impl;
1203       } else {
1204         ADD_SUBCLASS_CHAIN(impl);
1205       }
1206     }
1207   }
1208 
1209   // Recursively process each non-trivial sibling chain.
1210   while (chaini > 0) {
1211     Klass* chain = chains[--chaini];
1212     for (Klass* sub = chain; sub != NULL; sub = sub->next_sibling()) {
1213       if (do_counts) { NOT_PRODUCT(deps_find_witness_steps++); }
1214       if (is_participant(sub)) {
1215         if (participants_hide_witnesses)  continue;
1216         // else fall through to process this guy's subclasses
1217       } else if (is_witness(sub) && !ignore_witness(sub)) {
1218         return sub;
1219       }
1220       if (chaini < (VerifyDependencies? 2: CHAINMAX)) {
1221         // Fast path.  (Partially disabled if VerifyDependencies.)
1222         ADD_SUBCLASS_CHAIN(sub);
1223       } else {
1224         // Worklist overflow.  Do a recursive call.  Should be rare.
1225         // The recursive call will have its own worklist, of course.
1226         // (Note that sub has already been tested, so that there is
1227         // no need for the recursive call to re-test.  That's handy,
1228         // since the recursive call sees sub as the context_type.)
1229         if (do_counts) { NOT_PRODUCT(deps_find_witness_recursions++); }
1230         Klass* witness = find_witness_anywhere(sub,
1231                                                  participants_hide_witnesses,
1232                                                  /*top_level_call=*/ false);
1233         if (witness != NULL)  return witness;
1234       }
1235     }
1236   }
1237 
1238   // No witness found.  The dependency remains unbroken.
1239   return NULL;
1240 #undef ADD_SUBCLASS_CHAIN
1241 }
1242 
1243 
1244 bool Dependencies::is_concrete_klass(Klass* k) {
1245   if (k->is_abstract())  return false;
1246   // %%% We could treat classes which are concrete but
1247   // have not yet been instantiated as virtually abstract.
1248   // This would require a deoptimization barrier on first instantiation.
1249   //if (k->is_not_instantiated())  return false;
1250   return true;
1251 }
1252 
1253 bool Dependencies::is_concrete_method(Method* m, Klass * k) {
1254   // NULL is not a concrete method,
1255   // statics are irrelevant to virtual call sites,
1256   // abstract methods are not concrete,
1257   // overpass (error) methods are not concrete if k is abstract
1258   //
  // note "true" is the conservative answer:  the overpass clause is
  //     false when k == NULL, so we return true even when the answer
  //     would otherwise depend on that clause.
  return !(m == NULL || m->is_static() || m->is_abstract() ||
           (m->is_overpass() && k != NULL && k->is_abstract()));
1264 }
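// Examples: a static or abstract method is never concrete here; an
// overpass (error) method counts as concrete only when the holder k is
// non-abstract or unknown (k == NULL), which is the conservative answer.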
1265 
1266 
1267 Klass* Dependencies::find_finalizable_subclass(Klass* k) {
1268   if (k->is_interface())  return NULL;
1269   if (k->has_finalizer()) return k;
1270   k = k->subklass();
1271   while (k != NULL) {
1272     Klass* result = find_finalizable_subclass(k);
1273     if (result != NULL) return result;
1274     k = k->next_sibling();
1275   }
1276   return NULL;
1277 }
1278 
1279 
1280 bool Dependencies::is_concrete_klass(ciInstanceKlass* k) {
1281   if (k->is_abstract())  return false;
1282   // We could also return false if k does not yet appear to be
1283   // instantiated, if the VM version supports this distinction also.
1284   //if (k->is_not_instantiated())  return false;
1285   return true;
1286 }
1287 
1288 bool Dependencies::has_finalizable_subclass(ciInstanceKlass* k) {
1289   return k->has_finalizable_subclass();
1290 }
1291 
1292 
1293 // Any use of the contents (bytecodes) of a method must be
1294 // marked by an "evol_method" dependency, if those contents
1295 // can change.  (Note: A method is always dependent on itself.)
1296 Klass* Dependencies::check_evol_method(Method* m) {
1297   assert(must_be_in_vm(), "raw oops here");
1298   // Did somebody do a JVMTI RedefineClasses while our backs were turned?
  // Or is there now a breakpoint?
1300   // (Assumes compiled code cannot handle bkpts; change if UseFastBreakpoints.)
1301   if (m->is_old()
1302       || m->number_of_breakpoints() > 0) {
1303     return m->method_holder();
1304   } else {
1305     return NULL;
1306   }
1307 }
1308 
1309 // This is a strong assertion:  It is that the given type
1310 // has no subtypes whatever.  It is most useful for
1311 // optimizing checks on reflected types or on array types.
1312 // (Checks on types which are derived from real instances
1313 // can be optimized more strongly than this, because we
1314 // know that the checked type comes from a concrete type,
1315 // and therefore we can disregard abstract types.)
1316 Klass* Dependencies::check_leaf_type(Klass* ctxk) {
1317   assert(must_be_in_vm(), "raw oops here");
1318   assert_locked_or_safepoint(Compile_lock);
1319   InstanceKlass* ctx = InstanceKlass::cast(ctxk);
1320   Klass* sub = ctx->subklass();
1321   if (sub != NULL) {
1322     return sub;
1323   } else if (ctx->nof_implementors() != 0) {
1324     // if it is an interface, it must be unimplemented
1325     // (if it is not an interface, nof_implementors is always zero)
1326     Klass* impl = ctx->implementor();
1327     assert(impl != NULL, "must be set");
1328     return impl;
1329   } else {
1330     return NULL;
1331   }
1332 }
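// For example, a final class such as java.lang.String is trivially a leaf
// type, while a non-final class stays a leaf only until its first subclass
// (or, for an interface, its first implementor) is loaded; that new class
// is then returned as the witness.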
1333 
1334 // Test the assertion that conck is the only concrete subtype* of ctxk.
// The type conck itself is allowed to have further concrete subtypes.
1336 // This allows the compiler to narrow occurrences of ctxk by conck,
1337 // when dealing with the types of actual instances.
1338 Klass* Dependencies::check_abstract_with_unique_concrete_subtype(Klass* ctxk,
1339                                                                    Klass* conck,
1340                                                                    KlassDepChange* changes) {
1341   ClassHierarchyWalker wf(conck);
1342   return wf.find_witness_subtype(ctxk, changes);
1343 }
1344 
1345 // If a non-concrete class has no concrete subtypes, it is not (yet)
1346 // instantiatable.  This can allow the compiler to make some paths go
1347 // dead, if they are gated by a test of the type.
1348 Klass* Dependencies::check_abstract_with_no_concrete_subtype(Klass* ctxk,
1349                                                                KlassDepChange* changes) {
1350   // Find any concrete subtype, with no participants:
1351   ClassHierarchyWalker wf;
1352   return wf.find_witness_subtype(ctxk, changes);
1353 }
1354 
1355 
1356 // If a concrete class has no concrete subtypes, it can always be
1357 // exactly typed.  This allows the use of a cheaper type test.
1358 Klass* Dependencies::check_concrete_with_no_concrete_subtype(Klass* ctxk,
1359                                                                KlassDepChange* changes) {
1360   // Find any concrete subtype, with only the ctxk as participant:
1361   ClassHierarchyWalker wf(ctxk);
1362   return wf.find_witness_subtype(ctxk, changes);
1363 }
1364 
1365 
1366 // Find the unique concrete proper subtype of ctxk, or NULL if there
1367 // is more than one concrete proper subtype.  If there are no concrete
1368 // proper subtypes, return ctxk itself, whether it is concrete or not.
// The returned subtype is allowed to have further concrete subtypes.
1370 // That is, return CC1 for CX > CC1 > CC2, but NULL for CX > { CC1, CC2 }.
1371 Klass* Dependencies::find_unique_concrete_subtype(Klass* ctxk) {
1372   ClassHierarchyWalker wf(ctxk);   // Ignore ctxk when walking.
1373   wf.record_witnesses(1);          // Record one other witness when walking.
1374   Klass* wit = wf.find_witness_subtype(ctxk);
1375   if (wit != NULL)  return NULL;   // Too many witnesses.
1376   Klass* conck = wf.participant(0);
1377   if (conck == NULL) {
1378 #ifndef PRODUCT
1379     // Make sure the dependency mechanism will pass this discovery:
1380     if (VerifyDependencies) {
1381       // Turn off dependency tracing while actually testing deps.
1382       FlagSetting fs(TraceDependencies, false);
1383       if (!Dependencies::is_concrete_klass(ctxk)) {
1384         guarantee(NULL ==
1385                   (void *)check_abstract_with_no_concrete_subtype(ctxk),
1386                   "verify dep.");
1387       } else {
1388         guarantee(NULL ==
1389                   (void *)check_concrete_with_no_concrete_subtype(ctxk),
1390                   "verify dep.");
1391       }
1392     }
1393 #endif //PRODUCT
1394     return ctxk;                   // Return ctxk as a flag for "no subtypes".
1395   } else {
1396 #ifndef PRODUCT
1397     // Make sure the dependency mechanism will pass this discovery:
1398     if (VerifyDependencies) {
1399       // Turn off dependency tracing while actually testing deps.
1400       FlagSetting fs(TraceDependencies, false);
1401       if (!Dependencies::is_concrete_klass(ctxk)) {
1402         guarantee(NULL == (void *)
1403                   check_abstract_with_unique_concrete_subtype(ctxk, conck),
1404                   "verify dep.");
1405       }
1406     }
1407 #endif //PRODUCT
1408     return conck;
1409   }
1410 }
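
     // Hypothetical caller sketch (the real callers live in the CI and the
     // compilers; this only illustrates the return convention stated above):
     //
     //   Klass* uniq = Dependencies::find_unique_concrete_subtype(ctxk);
     //   if (uniq == ctxk) {
     //     // no concrete proper subtypes at all
     //   } else if (uniq != NULL) {
     //     // uniq is currently the only concrete subtype below ctxk
     //   } else {
     //     // two or more concrete subtypes; nothing can be asserted
     //   }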
1411 
1412 // Test the assertion that k1 and k2 are the only concrete subtypes of ctxk,
1413 // except possibly for further subtypes of k1 and k2 themselves.
1414 // The context type must be abstract.  The types k1 and k2 are themselves
1415 // allowed to have further concrete subtypes.
1416 Klass* Dependencies::check_abstract_with_exclusive_concrete_subtypes(
1417                                                 Klass* ctxk,
1418                                                 Klass* k1,
1419                                                 Klass* k2,
1420                                                 KlassDepChange* changes) {
1421   ClassHierarchyWalker wf;
1422   wf.add_participant(k1);
1423   wf.add_participant(k2);
1424   return wf.find_witness_subtype(ctxk, changes);
1425 }
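
     // For example (a sketch, in the CX/CC notation used above):
     //   CX > { CC1, CC2 }       -- assertion holds, NULL is returned
     //   CX > { CC1, CC2, CC3 }  -- the newly found CC3 is returned as witness
     // Further concrete subtypes of CC1 or CC2 do not break the assertion.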
1426 
1427 // Search ctxk for concrete subtypes.  If there are klen or fewer,
1428 // pack them into the given array and return the number.
1429 // Otherwise, return -1, meaning the given array would overflow.
1430 // (Note that a return of 0 means there are no concrete subtypes at all.)
1431 // In this search, if ctxk is concrete, it will be reported alone.
1432 // For any type CC reported, no proper subtypes of CC will be reported.
1433 int Dependencies::find_exclusive_concrete_subtypes(Klass* ctxk,
1434                                                    int klen,
1435                                                    Klass* karray[]) {
1436   ClassHierarchyWalker wf;
1437   wf.record_witnesses(klen);
1438   Klass* wit = wf.find_witness_subtype(ctxk);
1439   if (wit != NULL)  return -1;  // Too many witnesses.
1440   int num = wf.num_participants();
1441   assert(num <= klen, "oob");
1442   // Pack the result array with the good news.
1443   for (int i = 0; i < num; i++)
1444     karray[i] = wf.participant(i);
1445 #ifndef PRODUCT
1446   // Make sure the dependency mechanism will pass this discovery:
1447   if (VerifyDependencies) {
1448     // Turn off dependency tracing while actually testing deps.
1449     FlagSetting fs(TraceDependencies, false);
1450     switch (Dependencies::is_concrete_klass(ctxk)? -1: num) {
1451     case -1: // ctxk was itself concrete
1452       guarantee(num == 1 && karray[0] == ctxk, "verify dep.");
1453       break;
1454     case 0:
1455       guarantee(NULL == (void *)check_abstract_with_no_concrete_subtype(ctxk),
1456                 "verify dep.");
1457       break;
1458     case 1:
1459       guarantee(NULL == (void *)
1460                 check_abstract_with_unique_concrete_subtype(ctxk, karray[0]),
1461                 "verify dep.");
1462       break;
1463     case 2:
1464       guarantee(NULL == (void *)
1465                 check_abstract_with_exclusive_concrete_subtypes(ctxk,
1466                                                                 karray[0],
1467                                                                 karray[1]),
1468                 "verify dep.");
1469       break;
1470     default:
1471       ShouldNotReachHere();  // klen > 2 not yet supported
1472     }
1473   }
1474 #endif //PRODUCT
1475   return num;
1476 }
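
     // Hypothetical usage sketch (illustrating only the return convention
     // described above):
     //
     //   Klass* karray[2];
     //   int num = Dependencies::find_exclusive_concrete_subtypes(ctxk, 2, karray);
     //   switch (num) {
     //   case -1: break;  // more than 2 concrete subtypes; assert nothing
     //   case  0: break;  // no concrete subtypes at all (ctxk is abstract)
     //   case  1: break;  // karray[0] is the only one (possibly ctxk itself)
     //   case  2: break;  // karray[0..1] are the only concrete subtypes
     //   }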
1477 
1478 // If a class (or interface) has a unique concrete method uniqm, return NULL.
1479 // Otherwise, return a class that contains an interfering method.
1480 Klass* Dependencies::check_unique_concrete_method(Klass* ctxk, Method* uniqm,
1481                                                     KlassDepChange* changes) {
1482   // Here is a missing optimization:  If uniqm->is_final(),
1483   // we don't really need to search beneath it for overrides.
1484   // This is probably not important, since we don't use dependencies
1485   // to track final methods.  (They can't be "definalized".)
1486   ClassHierarchyWalker wf(uniqm->method_holder(), uniqm);
1487   return wf.find_witness_definer(ctxk, changes);
1488 }
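
     // For example (a sketch): if ctxk declares m and CC1 <: ctxk holds the
     // only concrete implementation uniqm, then loading CC2 <: ctxk with its
     // own override of m makes CC2 the witness returned here, and nmethods
     // that devirtualized calls to uniqm are deoptimized.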
1489 
1490 // Find the set of all non-abstract methods under ctxk that match m.
1491 // (The method m must be defined or inherited in ctxk.)
1492 // Include m itself in the set, unless it is abstract.
1493 // If this set has exactly one element, return that element.
1494 Method* Dependencies::find_unique_concrete_method(Klass* ctxk, Method* m) {
1495   // Return NULL if m is marked old; must have been a redefined method.
1496   if (m->is_old()) {
1497     return NULL;
1498   }
1499   ClassHierarchyWalker wf(m);
1500   assert(wf.check_method_context(ctxk, m), "proper context");
1501   wf.record_witnesses(1);
1502   Klass* wit = wf.find_witness_definer(ctxk);
1503   if (wit != NULL)  return NULL;  // Too many witnesses.
1504   Method* fm = wf.found_method(0);  // Will be NULL if num_parts == 0.
1505   if (Dependencies::is_concrete_method(m, ctxk)) {
1506     if (fm == NULL) {
1507       // It turns out that m was always the only implementation.
1508       fm = m;
1509     } else if (fm != m) {
1510       // Two conflicting implementations after all.
1511       // (This can happen if m is inherited into ctxk and fm overrides it.)
1512       return NULL;
1513     }
1514   }
1515 #ifndef PRODUCT
1516   // Make sure the dependency mechanism will pass this discovery:
1517   if (VerifyDependencies && fm != NULL) {
1518     guarantee(NULL == (void *)check_unique_concrete_method(ctxk, fm),
1519               "verify dep.");
1520   }
1521 #endif //PRODUCT
1522   return fm;
1523 }
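
     // Hypothetical caller sketch (the compilers use this result to
     // devirtualize calls; the names here are illustrative only):
     //
     //   Method* target = Dependencies::find_unique_concrete_method(ctxk, m);
     //   if (target != NULL) {
     //     // the call can be statically bound to 'target', provided a
     //     // unique_concrete_method dependency on (ctxk, target) is recorded
     //   } else {
     //     // zero or several implementations; keep the virtual dispatch
     //   }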
1524 
1525 Klass* Dependencies::check_exclusive_concrete_methods(Klass* ctxk,
1526                                                         Method* m1,
1527                                                         Method* m2,
1528                                                         KlassDepChange* changes) {
1529   ClassHierarchyWalker wf(m1);
1530   wf.add_participant(m1->method_holder());
1531   wf.add_participant(m2->method_holder());
1532   return wf.find_witness_definer(ctxk, changes);
1533 }
1534 
1535 Klass* Dependencies::check_has_no_finalizable_subclasses(Klass* ctxk, KlassDepChange* changes) {
1536   Klass* search_at = ctxk;
1537   if (changes != NULL)
1538     search_at = changes->new_type(); // just look at the new bit
1539   return find_finalizable_subclass(search_at);
1540 }
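
     // For example (a sketch): a compiler that omits finalizer registration
     // for "new T()" because T has no finalizable subclasses asserts
     // no_finalizable_subclasses on T; a later-loaded subclass of T that
     // overrides finalize() is returned as the witness here.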
1541 
1542 Klass* Dependencies::check_call_site_target_value(oop call_site, oop method_handle, CallSiteDepChange* changes) {
1543   assert(!oopDesc::is_null(call_site), "sanity");
1544   assert(!oopDesc::is_null(method_handle), "sanity");
1545   assert(call_site->is_a(SystemDictionary::CallSite_klass()),     "sanity");
1546 
1547   if (changes == NULL) {
1548     // Validate all CallSites
1549     if (java_lang_invoke_CallSite::target(call_site) != method_handle)
1550       return call_site->klass();  // assertion failed
1551   } else {
1552     // Validate the given CallSite
1553     if (call_site == changes->call_site() && java_lang_invoke_CallSite::target(call_site) != changes->method_handle()) {
1554       assert(method_handle != changes->method_handle(), "must be");
1555       return call_site->klass();  // assertion failed
1556     }
1557   }
1558   return NULL;  // assertion still valid
1559 }
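
     // Usage sketch (hypothetical oops 'cs' and 'mh'; the real caller is
     // DepStream::check_call_site_dependency below):
     //
     //   // an nmethod recorded the assertion cs.target == mh at compile time
     //   Klass* w = check_call_site_target_value(cs, mh, NULL);
     //   // w == NULL        -> the target is unchanged; the nmethod stays valid
     //   // w == cs->klass() -> the site was retargeted; dependents must deopt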
1560 
1561 void Dependencies::invalidate_dependent_nmethods(instanceKlassHandle ctxk, DepChange& changes, TRAPS) {
1562   MutexLocker mu(Compile_lock, THREAD);
1563 
1564   int marked = 0;
1565   {
1566     MutexLockerEx mu2(CodeCache_lock, Mutex::_no_safepoint_check_flag);
1567     marked = ctxk->mark_dependent_nmethods(changes);
1568   }
1569   if (marked > 0) {
1570     ctxk->set_finals(true);
1571     // At least one nmethod has been marked for deoptimization
1572     VM_Deoptimize op;
1573     VMThread::execute(&op);
1574   }
1575 }
1576 
1577 void Dependencies::DepStream::trace_and_log_witness(Klass* witness) {
1578   if (witness != NULL) {
1579     if (TraceDependencies) {
1580       print_dependency(witness, /*verbose=*/ true);
1581     }
1582     // The following is a no-op unless logging is enabled:
1583     log_dependency(witness);
1584   }
1585 }
1586 
1587 
1588 Klass* Dependencies::DepStream::check_klass_dependency(KlassDepChange* changes) {
1589   assert_locked_or_safepoint(Compile_lock);
1590   Dependencies::check_valid_dependency_type(type());
1591 
1592   Klass* witness = NULL;
1593   switch (type()) {
1594   case evol_method:
1595     witness = check_evol_method(method_argument(0));
1596     break;
1597   case leaf_type:
1598     witness = check_leaf_type(context_type());
1599     break;
1600   case abstract_with_unique_concrete_subtype:
1601     witness = check_abstract_with_unique_concrete_subtype(context_type(), type_argument(1), changes);
1602     break;
1603   case abstract_with_no_concrete_subtype:
1604     witness = check_abstract_with_no_concrete_subtype(context_type(), changes);
1605     break;
1606   case concrete_with_no_concrete_subtype:
1607     witness = check_concrete_with_no_concrete_subtype(context_type(), changes);
1608     break;
1609   case unique_concrete_method:
1610     witness = check_unique_concrete_method(context_type(), method_argument(1), changes);
1611     break;
1612   case abstract_with_exclusive_concrete_subtypes_2:
1613     witness = check_abstract_with_exclusive_concrete_subtypes(context_type(), type_argument(1), type_argument(2), changes);
1614     break;
1615   case exclusive_concrete_methods_2:
1616     witness = check_exclusive_concrete_methods(context_type(), method_argument(1), method_argument(2), changes);
1617     break;
1618   case no_finalizable_subclasses:
1619     witness = check_has_no_finalizable_subclasses(context_type(), changes);
1620     break;
1621   default:
1622     witness = NULL;
1623     break;
1624   }
1625   trace_and_log_witness(witness);
1626   return witness;
1627 }
1628 
1629 
1630 Klass* Dependencies::DepStream::check_call_site_dependency(CallSiteDepChange* changes) {
1631   assert_locked_or_safepoint(Compile_lock);
1632   Dependencies::check_valid_dependency_type(type());
1633 
1634   Klass* witness = NULL;
1635   switch (type()) {
1636   case call_site_target_value:
1637     witness = check_call_site_target_value(argument_oop(0), argument_oop(1), changes);
1638     break;
1639   default:
1640     witness = NULL;
1641     break;
1642   }
1643   trace_and_log_witness(witness);
1644   return witness;
1645 }
1646 
1647 
1648 Klass* Dependencies::DepStream::spot_check_dependency_at(DepChange& changes) {
1649   // Handle klass dependency
1650   if (changes.is_klass_change() && changes.as_klass_change()->involves_context(context_type()))
1651     return check_klass_dependency(changes.as_klass_change());
1652 
1653   // Handle CallSite dependency
1654   if (changes.is_call_site_change())
1655     return check_call_site_dependency(changes.as_call_site_change());
1656 
1657   if (changes.is_constant_field_change()) {
1658     Handle holder = changes.as_constant_field_change()->holder();
1659     int offset = changes.as_constant_field_change()->offset();
1660     int dep_offset = -1; // TODO: store offset in dependency
1661     switch (type()) {
1662       case constant_field_value_instance:
1663         if (holder.is_null())        return context_type(); // all oops
1664         if (holder() == argument_oop(1)) {
1665           if (offset == -1)          return context_type(); // all fields
1666           if (offset == dep_offset)  return context_type(); // same field
1667         }
1668         break;
1669       case constant_field_value_klass:
1670         if (offset == -1)          return context_type(); // all fields
1671         if (offset == dep_offset)  return context_type(); // same field
1672         break;
1673     }
1674   }
1675 
1676   // irrelevant dependency; skip it
1677   return NULL;
1678 }
1679 
1680 
1681 void DepChange::print() {
1682   if (is_klass_change())
1683     tty->print_cr("klass_change");
1684   if (is_call_site_change())
1685     tty->print_cr("call_site_change");
1686   if (is_constant_field_change())
1687     tty->print_cr("constant_field_change: offset=%d %s", as_constant_field_change()->offset(), as_constant_field_change()->holder()->print_string());
1688   int nsup = 0, nint = 0;
1689   for (ContextStream str(*this); str.next(); ) {
1690     Klass* k = str.klass();
1691     switch (str.change_type()) {
1692     case Change_new_type:
1693       tty->print_cr("  dependee = %s", InstanceKlass::cast(k)->external_name());
1694       break;
1695     case Change_new_sub:
1696       if (!WizardMode) {
1697         ++nsup;
1698       } else {
1699         tty->print_cr("  context super = %s", InstanceKlass::cast(k)->external_name());
1700       }
1701       break;
1702     case Change_new_impl:
1703       if (!WizardMode) {
1704         ++nint;
1705       } else {
1706         tty->print_cr("  context interface = %s", InstanceKlass::cast(k)->external_name());
1707       }
1708       break;
1709     }
1710   }
1711   if (nsup + nint != 0) {
1712     tty->print_cr("  context supers = %d, interfaces = %d", nsup, nint);
1713   }
1714 }
1715 
1716 void DepChange::ContextStream::start() {
1717   Klass* new_type = _changes.is_klass_change() ? _changes.as_klass_change()->new_type() : (Klass*) NULL;
1718   _change_type = (new_type == NULL ? NO_CHANGE : Start_Klass);
1719   _klass = new_type;
1720   _ti_base = NULL;
1721   _ti_index = 0;
1722   _ti_limit = 0;
1723 }
1724 
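     // For a KlassDepChange the stream produces, in order: the new type
     // itself, then each of its superclasses (walking up the super chain),
     // then each of its transitively implemented interfaces; for other kinds
     // of change the stream is empty.  The typical loop, as used by
     // DepChange::print() above and KlassDepChange::initialize() below, is:
     //
     //   for (ContextStream str(*this); str.next(); ) {  // inside a DepChange
     //     Klass* k = str.klass();  // the dependee, a super, or an interface
     //   }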
1725 bool DepChange::ContextStream::next() {
1726   switch (_change_type) {
1727   case Start_Klass:             // initial state; _klass is the new type
1728     _ti_base = InstanceKlass::cast(_klass)->transitive_interfaces();
1729     _ti_index = 0;
1730     _change_type = Change_new_type;
1731     return true;
1732   case Change_new_type:
1733     // fall through:
1734     _change_type = Change_new_sub;
1735   case Change_new_sub:
1736     // 6598190: brackets workaround Sun Studio C++ compiler bug 6629277
1737     {
1738       _klass = InstanceKlass::cast(_klass)->super();
1739       if (_klass != NULL) {
1740         return true;
1741       }
1742     }
1743     // else set up _ti_limit and fall through:
1744     _ti_limit = (_ti_base == NULL) ? 0 : _ti_base->length();
1745     _change_type = Change_new_impl;
1746   case Change_new_impl:
1747     if (_ti_index < _ti_limit) {
1748       _klass = _ti_base->at(_ti_index++);
1749       return true;
1750     }
1751     // fall through:
1752     _change_type = NO_CHANGE;  // iterator is exhausted
1753   case NO_CHANGE:
1754     break;
1755   default:
1756     ShouldNotReachHere();
1757   }
1758   return false;
1759 }
1760 
1761 void KlassDepChange::initialize() {
1762   // entire transaction must be under this lock:
1763   assert_lock_strong(Compile_lock);
1764 
1765   // Mark the dependee, all of its superclasses,
1766   // and all of its transitive interfaces
1767   for (ContextStream str(*this); str.next(); ) {
1768     Klass* d = str.klass();
1769     assert(!InstanceKlass::cast(d)->is_marked_dependent(), "checking");
1770     InstanceKlass::cast(d)->set_is_marked_dependent(true);
1771   }
1772 }
1773 
1774 KlassDepChange::~KlassDepChange() {
1775   // Unmark the dependee, all of its superclasses,
1776   // and all of its transitive interfaces
1777   for (ContextStream str(*this); str.next(); ) {
1778     Klass* d = str.klass();
1779     InstanceKlass::cast(d)->set_is_marked_dependent(false);
1780   }
1781 }
1782 
1783 bool KlassDepChange::involves_context(Klass* k) {
1784   if (k == NULL || !k->oop_is_instance()) {
1785     return false;
1786   }
1787   InstanceKlass* ik = InstanceKlass::cast(k);
1788   bool is_contained = ik->is_marked_dependent();
1789   assert(is_contained == new_type()->is_subtype_of(k),
1790          "correct marking of potential context types");
1791   return is_contained;
1792 }
1793 
1794 #ifndef PRODUCT
1795 void Dependencies::print_statistics() {
1796   if (deps_find_witness_print != 0) {
1797     // Call one final time, to flush out the data.
1798     deps_find_witness_print = -1;
1799     count_find_witness_calls();
1800   }
1801 }
1802 #endif