1 /*
   2  * Copyright (c) 2005, 2019, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "ci/ciArrayKlass.hpp"
  27 #include "ci/ciEnv.hpp"
  28 #include "ci/ciKlass.hpp"
  29 #include "ci/ciMethod.hpp"
  30 #include "classfile/javaClasses.inline.hpp"
  31 #include "code/dependencies.hpp"
#include "compiler/compileBroker.hpp"
#include "compiler/compileLog.hpp"
  34 #include "compiler/compileTask.hpp"
  35 #include "memory/resourceArea.hpp"
  36 #include "oops/klass.hpp"
  37 #include "oops/oop.inline.hpp"
  38 #include "oops/objArrayKlass.hpp"
  39 #include "runtime/flags/flagSetting.hpp"
  40 #include "runtime/handles.hpp"
  41 #include "runtime/handles.inline.hpp"
  42 #include "runtime/jniHandles.inline.hpp"
  43 #include "runtime/thread.inline.hpp"
  44 #include "utilities/copy.hpp"
  45 
  46 
  47 #ifdef ASSERT
  48 static bool must_be_in_vm() {
  49   Thread* thread = Thread::current();
  50   if (thread->is_Java_thread())
  51     return ((JavaThread*)thread)->thread_state() == _thread_in_vm;
  52   else
  53     return true;  //something like this: thread->is_VM_thread();
  54 }
  55 #endif //ASSERT
  56 
  57 void Dependencies::initialize(ciEnv* env) {
  58   Arena* arena = env->arena();
  59   _oop_recorder = env->oop_recorder();
  60   _log = env->log();
  61   _dep_seen = new(arena) GrowableArray<int>(arena, 500, 0, 0);
  62 #if INCLUDE_JVMCI
  63   _using_dep_values = false;
  64 #endif
  65   DEBUG_ONLY(_deps[end_marker] = NULL);
  66   for (int i = (int)FIRST_TYPE; i < (int)TYPE_LIMIT; i++) {
  67     _deps[i] = new(arena) GrowableArray<ciBaseObject*>(arena, 10, 0, 0);
  68   }
  69   _content_bytes = NULL;
  70   _size_in_bytes = (size_t)-1;
  71 
  72   assert(TYPE_LIMIT <= (1<<LG2_TYPE_LIMIT), "sanity");
  73 }
  74 
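// The assert_* methods below record individual dependency assertions made
// during compilation.  Each call appends its arguments to the per-type
// bucket in _deps; the assert_common_{1,2,3} helpers coalesce exact
// duplicates and merge compatible context types so the recorded set stays
// small before it is encoded into the nmethod.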
  75 void Dependencies::assert_evol_method(ciMethod* m) {
  76   assert_common_1(evol_method, m);
  77 }
  78 
  79 void Dependencies::assert_leaf_type(ciKlass* ctxk) {
  80   if (ctxk->is_array_klass()) {
  81     // As a special case, support this assertion on an array type,
  82     // which reduces to an assertion on its element type.
  83     // Note that this cannot be done with assertions that
  84     // relate to concreteness or abstractness.
  85     ciType* elemt = ctxk->as_array_klass()->base_element_type();
  86     if (!elemt->is_instance_klass())  return;   // Ex:  int[][]
  87     ctxk = elemt->as_instance_klass();
  88     //if (ctxk->is_final())  return;            // Ex:  String[][]
  89   }
  90   check_ctxk(ctxk);
  91   assert_common_1(leaf_type, ctxk);
  92 }
  93 
  94 void Dependencies::assert_abstract_with_unique_concrete_subtype(ciKlass* ctxk, ciKlass* conck) {
  95   check_ctxk_abstract(ctxk);
  96   assert_common_2(abstract_with_unique_concrete_subtype, ctxk, conck);
  97 }
  98 
  99 void Dependencies::assert_abstract_with_no_concrete_subtype(ciKlass* ctxk) {
 100   check_ctxk_abstract(ctxk);
 101   assert_common_1(abstract_with_no_concrete_subtype, ctxk);
 102 }
 103 
 104 void Dependencies::assert_concrete_with_no_concrete_subtype(ciKlass* ctxk) {
 105   check_ctxk_concrete(ctxk);
 106   assert_common_1(concrete_with_no_concrete_subtype, ctxk);
 107 }
 108 
 109 void Dependencies::assert_unique_concrete_method(ciKlass* ctxk, ciMethod* uniqm) {
 110   check_ctxk(ctxk);
 111   check_unique_method(ctxk, uniqm);
 112   assert_common_2(unique_concrete_method, ctxk, uniqm);
 113 }
 114 
 115 void Dependencies::assert_abstract_with_exclusive_concrete_subtypes(ciKlass* ctxk, ciKlass* k1, ciKlass* k2) {
 116   check_ctxk(ctxk);
 117   assert_common_3(abstract_with_exclusive_concrete_subtypes_2, ctxk, k1, k2);
 118 }
 119 
 120 void Dependencies::assert_exclusive_concrete_methods(ciKlass* ctxk, ciMethod* m1, ciMethod* m2) {
 121   check_ctxk(ctxk);
 122   assert_common_3(exclusive_concrete_methods_2, ctxk, m1, m2);
 123 }
 124 
 125 void Dependencies::assert_has_no_finalizable_subclasses(ciKlass* ctxk) {
 126   check_ctxk(ctxk);
 127   assert_common_1(no_finalizable_subclasses, ctxk);
 128 }
 129 
 130 void Dependencies::assert_call_site_target_value(ciCallSite* call_site, ciMethodHandle* method_handle) {
 131   assert_common_2(call_site_target_value, call_site, method_handle);
 132 }
 133 
 134 #if INCLUDE_JVMCI
 135 
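// The JVMCI variant records dependencies directly in terms of Metadata*
// and oop values.  Arguments are wrapped in DepValue entries (backed by
// OopRecorder indices) instead of ci mirrors, but the bookkeeping follows
// the ciBaseObject-based path above.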
 136 Dependencies::Dependencies(Arena* arena, OopRecorder* oop_recorder, CompileLog* log) {
 137   _oop_recorder = oop_recorder;
 138   _log = log;
 139   _dep_seen = new(arena) GrowableArray<int>(arena, 500, 0, 0);
 140   _using_dep_values = true;
 141   DEBUG_ONLY(_dep_values[end_marker] = NULL);
 142   for (int i = (int)FIRST_TYPE; i < (int)TYPE_LIMIT; i++) {
 143     _dep_values[i] = new(arena) GrowableArray<DepValue>(arena, 10, 0, DepValue());
 144   }
 145   _content_bytes = NULL;
 146   _size_in_bytes = (size_t)-1;
 147 
 148   assert(TYPE_LIMIT <= (1<<LG2_TYPE_LIMIT), "sanity");
 149 }
 150 
 151 void Dependencies::assert_evol_method(Method* m) {
 152   assert_common_1(evol_method, DepValue(_oop_recorder, m));
 153 }
 154 
 155 void Dependencies::assert_has_no_finalizable_subclasses(Klass* ctxk) {
 156   check_ctxk(ctxk);
 157   assert_common_1(no_finalizable_subclasses, DepValue(_oop_recorder, ctxk));
 158 }
 159 
 160 void Dependencies::assert_leaf_type(Klass* ctxk) {
 161   if (ctxk->is_array_klass()) {
 162     // As a special case, support this assertion on an array type,
 163     // which reduces to an assertion on its element type.
 164     // Note that this cannot be done with assertions that
 165     // relate to concreteness or abstractness.
 166     BasicType elemt = ArrayKlass::cast(ctxk)->element_type();
 167     if (is_java_primitive(elemt))  return;   // Ex:  int[][]
 168     ctxk = ObjArrayKlass::cast(ctxk)->bottom_klass();
 169     //if (ctxk->is_final())  return;            // Ex:  String[][]
 170   }
 171   check_ctxk(ctxk);
 172   assert_common_1(leaf_type, DepValue(_oop_recorder, ctxk));
 173 }
 174 
 175 void Dependencies::assert_abstract_with_unique_concrete_subtype(Klass* ctxk, Klass* conck) {
 176   check_ctxk_abstract(ctxk);
 177   DepValue ctxk_dv(_oop_recorder, ctxk);
 178   DepValue conck_dv(_oop_recorder, conck, &ctxk_dv);
 179   assert_common_2(abstract_with_unique_concrete_subtype, ctxk_dv, conck_dv);
 180 }
 181 
 182 void Dependencies::assert_unique_concrete_method(Klass* ctxk, Method* uniqm) {
 183   check_ctxk(ctxk);
 184   check_unique_method(ctxk, uniqm);
 185   assert_common_2(unique_concrete_method, DepValue(_oop_recorder, ctxk), DepValue(_oop_recorder, uniqm));
 186 }
 187 
 188 void Dependencies::assert_call_site_target_value(oop call_site, oop method_handle) {
 189   assert_common_2(call_site_target_value, DepValue(_oop_recorder, JNIHandles::make_local(call_site)), DepValue(_oop_recorder, JNIHandles::make_local(method_handle)));
 190 }
 191 
 192 #endif // INCLUDE_JVMCI
 193 
 194 
// Helper function.  If we are adding a new dep. under ctxk2,
// try to find an old dep. under a broader* ctxk1.  If there is one,
// the new dep. is already subsumed by it; if instead the recorded
// context is narrower, widen it in place to ctxk2.  Returns false if
// the two contexts cannot be merged.
 198 bool Dependencies::maybe_merge_ctxk(GrowableArray<ciBaseObject*>* deps,
 199                                     int ctxk_i, ciKlass* ctxk2) {
 200   ciKlass* ctxk1 = deps->at(ctxk_i)->as_metadata()->as_klass();
 201   if (ctxk2->is_subtype_of(ctxk1)) {
 202     return true;  // success, and no need to change
 203   } else if (ctxk1->is_subtype_of(ctxk2)) {
 204     // new context class fully subsumes previous one
 205     deps->at_put(ctxk_i, ctxk2);
 206     return true;
 207   } else {
 208     return false;
 209   }
 210 }
 211 
 212 void Dependencies::assert_common_1(DepType dept, ciBaseObject* x) {
 213   assert(dep_args(dept) == 1, "sanity");
 214   log_dependency(dept, x);
 215   GrowableArray<ciBaseObject*>* deps = _deps[dept];
 216 
 217   // see if the same (or a similar) dep is already recorded
 218   if (note_dep_seen(dept, x)) {
 219     assert(deps->find(x) >= 0, "sanity");
 220   } else {
 221     deps->append(x);
 222   }
 223 }
 224 
 225 void Dependencies::assert_common_2(DepType dept,
 226                                    ciBaseObject* x0, ciBaseObject* x1) {
 227   assert(dep_args(dept) == 2, "sanity");
 228   log_dependency(dept, x0, x1);
 229   GrowableArray<ciBaseObject*>* deps = _deps[dept];
 230 
 231   // see if the same (or a similar) dep is already recorded
 232   bool has_ctxk = has_explicit_context_arg(dept);
 233   if (has_ctxk) {
 234     assert(dep_context_arg(dept) == 0, "sanity");
 235     if (note_dep_seen(dept, x1)) {
 236       // look in this bucket for redundant assertions
 237       const int stride = 2;
 238       for (int i = deps->length(); (i -= stride) >= 0; ) {
 239         ciBaseObject* y1 = deps->at(i+1);
 240         if (x1 == y1) {  // same subject; check the context
 241           if (maybe_merge_ctxk(deps, i+0, x0->as_metadata()->as_klass())) {
 242             return;
 243           }
 244         }
 245       }
 246     }
 247   } else {
 248     if (note_dep_seen(dept, x0) && note_dep_seen(dept, x1)) {
 249       // look in this bucket for redundant assertions
 250       const int stride = 2;
 251       for (int i = deps->length(); (i -= stride) >= 0; ) {
 252         ciBaseObject* y0 = deps->at(i+0);
 253         ciBaseObject* y1 = deps->at(i+1);
 254         if (x0 == y0 && x1 == y1) {
 255           return;
 256         }
 257       }
 258     }
 259   }
 260 
 261   // append the assertion in the correct bucket:
 262   deps->append(x0);
 263   deps->append(x1);
 264 }
 265 
 266 void Dependencies::assert_common_3(DepType dept,
 267                                    ciKlass* ctxk, ciBaseObject* x, ciBaseObject* x2) {
 268   assert(dep_context_arg(dept) == 0, "sanity");
 269   assert(dep_args(dept) == 3, "sanity");
 270   log_dependency(dept, ctxk, x, x2);
 271   GrowableArray<ciBaseObject*>* deps = _deps[dept];
 272 
 273   // try to normalize an unordered pair:
 274   bool swap = false;
 275   switch (dept) {
 276   case abstract_with_exclusive_concrete_subtypes_2:
 277     swap = (x->ident() > x2->ident() && x->as_metadata()->as_klass() != ctxk);
 278     break;
 279   case exclusive_concrete_methods_2:
 280     swap = (x->ident() > x2->ident() && x->as_metadata()->as_method()->holder() != ctxk);
 281     break;
 282   default:
 283     break;
 284   }
 285   if (swap) { ciBaseObject* t = x; x = x2; x2 = t; }
 286 
 287   // see if the same (or a similar) dep is already recorded
 288   if (note_dep_seen(dept, x) && note_dep_seen(dept, x2)) {
 289     // look in this bucket for redundant assertions
 290     const int stride = 3;
 291     for (int i = deps->length(); (i -= stride) >= 0; ) {
 292       ciBaseObject* y  = deps->at(i+1);
 293       ciBaseObject* y2 = deps->at(i+2);
 294       if (x == y && x2 == y2) {  // same subjects; check the context
 295         if (maybe_merge_ctxk(deps, i+0, ctxk)) {
 296           return;
 297         }
 298       }
 299     }
 300   }
 301   // append the assertion in the correct bucket:
 302   deps->append(ctxk);
 303   deps->append(x);
 304   deps->append(x2);
 305 }
 306 
 307 #if INCLUDE_JVMCI
 308 bool Dependencies::maybe_merge_ctxk(GrowableArray<DepValue>* deps,
 309                                     int ctxk_i, DepValue ctxk2_dv) {
 310   Klass* ctxk1 = deps->at(ctxk_i).as_klass(_oop_recorder);
 311   Klass* ctxk2 = ctxk2_dv.as_klass(_oop_recorder);
 312   if (ctxk2->is_subtype_of(ctxk1)) {
 313     return true;  // success, and no need to change
 314   } else if (ctxk1->is_subtype_of(ctxk2)) {
 315     // new context class fully subsumes previous one
 316     deps->at_put(ctxk_i, ctxk2_dv);
 317     return true;
 318   } else {
 319     return false;
 320   }
 321 }
 322 
 323 void Dependencies::assert_common_1(DepType dept, DepValue x) {
 324   assert(dep_args(dept) == 1, "sanity");
 325   //log_dependency(dept, x);
 326   GrowableArray<DepValue>* deps = _dep_values[dept];
 327 
 328   // see if the same (or a similar) dep is already recorded
 329   if (note_dep_seen(dept, x)) {
 330     assert(deps->find(x) >= 0, "sanity");
 331   } else {
 332     deps->append(x);
 333   }
 334 }
 335 
 336 void Dependencies::assert_common_2(DepType dept,
 337                                    DepValue x0, DepValue x1) {
 338   assert(dep_args(dept) == 2, "sanity");
 339   //log_dependency(dept, x0, x1);
 340   GrowableArray<DepValue>* deps = _dep_values[dept];
 341 
 342   // see if the same (or a similar) dep is already recorded
 343   bool has_ctxk = has_explicit_context_arg(dept);
 344   if (has_ctxk) {
 345     assert(dep_context_arg(dept) == 0, "sanity");
 346     if (note_dep_seen(dept, x1)) {
 347       // look in this bucket for redundant assertions
 348       const int stride = 2;
 349       for (int i = deps->length(); (i -= stride) >= 0; ) {
 350         DepValue y1 = deps->at(i+1);
 351         if (x1 == y1) {  // same subject; check the context
 352           if (maybe_merge_ctxk(deps, i+0, x0)) {
 353             return;
 354           }
 355         }
 356       }
 357     }
 358   } else {
 359     if (note_dep_seen(dept, x0) && note_dep_seen(dept, x1)) {
 360       // look in this bucket for redundant assertions
 361       const int stride = 2;
 362       for (int i = deps->length(); (i -= stride) >= 0; ) {
 363         DepValue y0 = deps->at(i+0);
 364         DepValue y1 = deps->at(i+1);
 365         if (x0 == y0 && x1 == y1) {
 366           return;
 367         }
 368       }
 369     }
 370   }
 371 
 372   // append the assertion in the correct bucket:
 373   deps->append(x0);
 374   deps->append(x1);
 375 }
 376 #endif // INCLUDE_JVMCI
 377 
 378 /// Support for encoding dependencies into an nmethod:
 379 
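// Copy the encoded dependency bytes into their final home in the nmethod.
// encode_content_bytes() must already have run, so content_bytes() points
// at the finished stream and size_in_bytes() is padded to a whole number
// of words.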
 380 void Dependencies::copy_to(nmethod* nm) {
 381   address beg = nm->dependencies_begin();
 382   address end = nm->dependencies_end();
 383   guarantee(end - beg >= (ptrdiff_t) size_in_bytes(), "bad sizing");
 384   Copy::disjoint_words((HeapWord*) content_bytes(),
 385                        (HeapWord*) beg,
 386                        size_in_bytes() / sizeof(HeapWord));
 387   assert(size_in_bytes() % sizeof(HeapWord) == 0, "copy by words");
 388 }
 389 
 390 static int sort_dep(ciBaseObject** p1, ciBaseObject** p2, int narg) {
 391   for (int i = 0; i < narg; i++) {
 392     int diff = p1[i]->ident() - p2[i]->ident();
 393     if (diff != 0)  return diff;
 394   }
 395   return 0;
 396 }
 397 static int sort_dep_arg_1(ciBaseObject** p1, ciBaseObject** p2)
 398 { return sort_dep(p1, p2, 1); }
 399 static int sort_dep_arg_2(ciBaseObject** p1, ciBaseObject** p2)
 400 { return sort_dep(p1, p2, 2); }
 401 static int sort_dep_arg_3(ciBaseObject** p1, ciBaseObject** p2)
 402 { return sort_dep(p1, p2, 3); }
 403 
 404 #if INCLUDE_JVMCI
 405 // metadata deps are sorted before object deps
 406 static int sort_dep_value(Dependencies::DepValue* p1, Dependencies::DepValue* p2, int narg) {
 407   for (int i = 0; i < narg; i++) {
 408     int diff = p1[i].sort_key() - p2[i].sort_key();
 409     if (diff != 0)  return diff;
 410   }
 411   return 0;
 412 }
 413 static int sort_dep_value_arg_1(Dependencies::DepValue* p1, Dependencies::DepValue* p2)
 414 { return sort_dep_value(p1, p2, 1); }
 415 static int sort_dep_value_arg_2(Dependencies::DepValue* p1, Dependencies::DepValue* p2)
 416 { return sort_dep_value(p1, p2, 2); }
 417 static int sort_dep_value_arg_3(Dependencies::DepValue* p1, Dependencies::DepValue* p2)
 418 { return sort_dep_value(p1, p2, 3); }
 419 #endif // INCLUDE_JVMCI
 420 
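// Sort the contents of each dependency bucket by argument identity so the
// records are emitted in a stable order when they are encoded.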
 421 void Dependencies::sort_all_deps() {
 422 #if INCLUDE_JVMCI
 423   if (_using_dep_values) {
 424     for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
 425       DepType dept = (DepType)deptv;
 426       GrowableArray<DepValue>* deps = _dep_values[dept];
 427       if (deps->length() <= 1)  continue;
 428       switch (dep_args(dept)) {
 429       case 1: deps->sort(sort_dep_value_arg_1, 1); break;
 430       case 2: deps->sort(sort_dep_value_arg_2, 2); break;
 431       case 3: deps->sort(sort_dep_value_arg_3, 3); break;
 432       default: ShouldNotReachHere(); break;
 433       }
 434     }
 435     return;
 436   }
 437 #endif // INCLUDE_JVMCI
 438   for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
 439     DepType dept = (DepType)deptv;
 440     GrowableArray<ciBaseObject*>* deps = _deps[dept];
 441     if (deps->length() <= 1)  continue;
 442     switch (dep_args(dept)) {
 443     case 1: deps->sort(sort_dep_arg_1, 1); break;
 444     case 2: deps->sort(sort_dep_arg_2, 2); break;
 445     case 3: deps->sort(sort_dep_arg_3, 3); break;
 446     default: ShouldNotReachHere(); break;
 447     }
 448   }
 449 }
 450 
 451 size_t Dependencies::estimate_size_in_bytes() {
 452   size_t est_size = 100;
 453 #if INCLUDE_JVMCI
 454   if (_using_dep_values) {
 455     for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
 456       DepType dept = (DepType)deptv;
 457       GrowableArray<DepValue>* deps = _dep_values[dept];
 458       est_size += deps->length() * 2;  // tags and argument(s)
 459     }
 460     return est_size;
 461   }
 462 #endif // INCLUDE_JVMCI
 463   for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
 464     DepType dept = (DepType)deptv;
 465     GrowableArray<ciBaseObject*>* deps = _deps[dept];
 466     est_size += deps->length()*2;  // tags and argument(s)
 467   }
 468   return est_size;
 469 }
 470 
 471 ciKlass* Dependencies::ctxk_encoded_as_null(DepType dept, ciBaseObject* x) {
 472   switch (dept) {
 473   case abstract_with_exclusive_concrete_subtypes_2:
 474     return x->as_metadata()->as_klass();
 475   case unique_concrete_method:
 476   case exclusive_concrete_methods_2:
 477     return x->as_metadata()->as_method()->holder();
 478   default:
 479     return NULL;  // let NULL be NULL
 480   }
 481 }
 482 
 483 Klass* Dependencies::ctxk_encoded_as_null(DepType dept, Metadata* x) {
 484   assert(must_be_in_vm(), "raw oops here");
 485   switch (dept) {
 486   case abstract_with_exclusive_concrete_subtypes_2:
 487     assert(x->is_klass(), "sanity");
 488     return (Klass*) x;
 489   case unique_concrete_method:
 490   case exclusive_concrete_methods_2:
 491     assert(x->is_method(), "sanity");
 492     return ((Method*)x)->method_holder();
 493   default:
 494     return NULL;  // let NULL be NULL
 495   }
 496 }
 497 
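// Flatten all recorded assertions into the compressed byte stream that is
// later copied into the nmethod.  Each record is one type byte (with
// default_context_type_bit set when the context klass is implied by the
// following argument and therefore omitted), followed by a recorded index
// for each remaining argument.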
 498 void Dependencies::encode_content_bytes() {
 499   sort_all_deps();
 500 
 501   // cast is safe, no deps can overflow INT_MAX
 502   CompressedWriteStream bytes((int)estimate_size_in_bytes());
 503 
 504 #if INCLUDE_JVMCI
 505   if (_using_dep_values) {
 506     for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
 507       DepType dept = (DepType)deptv;
 508       GrowableArray<DepValue>* deps = _dep_values[dept];
 509       if (deps->length() == 0)  continue;
 510       int stride = dep_args(dept);
 511       int ctxkj  = dep_context_arg(dept);  // -1 if no context arg
 512       assert(stride > 0, "sanity");
 513       for (int i = 0; i < deps->length(); i += stride) {
 514         jbyte code_byte = (jbyte)dept;
 515         int skipj = -1;
 516         if (ctxkj >= 0 && ctxkj+1 < stride) {
 517           Klass*  ctxk = deps->at(i+ctxkj+0).as_klass(_oop_recorder);
 518           DepValue x = deps->at(i+ctxkj+1);  // following argument
 519           if (ctxk == ctxk_encoded_as_null(dept, x.as_metadata(_oop_recorder))) {
 520             skipj = ctxkj;  // we win:  maybe one less oop to keep track of
 521             code_byte |= default_context_type_bit;
 522           }
 523         }
 524         bytes.write_byte(code_byte);
 525         for (int j = 0; j < stride; j++) {
 526           if (j == skipj)  continue;
 527           DepValue v = deps->at(i+j);
 528           int idx = v.index();
 529           bytes.write_int(idx);
 530         }
 531       }
 532     }
 533   } else {
 534 #endif // INCLUDE_JVMCI
 535   for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
 536     DepType dept = (DepType)deptv;
 537     GrowableArray<ciBaseObject*>* deps = _deps[dept];
 538     if (deps->length() == 0)  continue;
 539     int stride = dep_args(dept);
 540     int ctxkj  = dep_context_arg(dept);  // -1 if no context arg
 541     assert(stride > 0, "sanity");
 542     for (int i = 0; i < deps->length(); i += stride) {
 543       jbyte code_byte = (jbyte)dept;
 544       int skipj = -1;
 545       if (ctxkj >= 0 && ctxkj+1 < stride) {
 546         ciKlass*  ctxk = deps->at(i+ctxkj+0)->as_metadata()->as_klass();
 547         ciBaseObject* x     = deps->at(i+ctxkj+1);  // following argument
 548         if (ctxk == ctxk_encoded_as_null(dept, x)) {
 549           skipj = ctxkj;  // we win:  maybe one less oop to keep track of
 550           code_byte |= default_context_type_bit;
 551         }
 552       }
 553       bytes.write_byte(code_byte);
 554       for (int j = 0; j < stride; j++) {
 555         if (j == skipj)  continue;
 556         ciBaseObject* v = deps->at(i+j);
 557         int idx;
 558         if (v->is_object()) {
 559           idx = _oop_recorder->find_index(v->as_object()->constant_encoding());
 560         } else {
 561           ciMetadata* meta = v->as_metadata();
 562           idx = _oop_recorder->find_index(meta->constant_encoding());
 563         }
 564         bytes.write_int(idx);
 565       }
 566     }
 567   }
 568 #if INCLUDE_JVMCI
 569   }
 570 #endif
 571 
 572   // write a sentinel byte to mark the end
 573   bytes.write_byte(end_marker);
 574 
 575   // round it out to a word boundary
 576   while (bytes.position() % sizeof(HeapWord) != 0) {
 577     bytes.write_byte(end_marker);
 578   }
 579 
 580   // check whether the dept byte encoding really works
 581   assert((jbyte)default_context_type_bit != 0, "byte overflow");
 582 
 583   _content_bytes = bytes.buffer();
 584   _size_in_bytes = bytes.position();
 585 }
 586 
 587 
 588 const char* Dependencies::_dep_name[TYPE_LIMIT] = {
 589   "end_marker",
 590   "evol_method",
 591   "leaf_type",
 592   "abstract_with_unique_concrete_subtype",
 593   "abstract_with_no_concrete_subtype",
 594   "concrete_with_no_concrete_subtype",
 595   "unique_concrete_method",
 596   "abstract_with_exclusive_concrete_subtypes_2",
 597   "exclusive_concrete_methods_2",
 598   "no_finalizable_subclasses",
 599   "call_site_target_value"
 600 };
 601 
 602 int Dependencies::_dep_args[TYPE_LIMIT] = {
 603   -1,// end_marker
 604   1, // evol_method m
 605   1, // leaf_type ctxk
 606   2, // abstract_with_unique_concrete_subtype ctxk, k
 607   1, // abstract_with_no_concrete_subtype ctxk
 608   1, // concrete_with_no_concrete_subtype ctxk
 609   2, // unique_concrete_method ctxk, m
  3, // abstract_with_exclusive_concrete_subtypes_2 ctxk, k1, k2
  3, // exclusive_concrete_methods_2 ctxk, m1, m2
 612   1, // no_finalizable_subclasses ctxk
 613   2  // call_site_target_value call_site, method_handle
 614 };
 615 
 616 const char* Dependencies::dep_name(Dependencies::DepType dept) {
 617   if (!dept_in_mask(dept, all_types))  return "?bad-dep?";
 618   return _dep_name[dept];
 619 }
 620 
 621 int Dependencies::dep_args(Dependencies::DepType dept) {
 622   if (!dept_in_mask(dept, all_types))  return -1;
 623   return _dep_args[dept];
 624 }
 625 
 626 void Dependencies::check_valid_dependency_type(DepType dept) {
 627   guarantee(FIRST_TYPE <= dept && dept < TYPE_LIMIT, "invalid dependency type: %d", (int) dept);
 628 }
 629 
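// Re-check every recorded dependency against the current class hierarchy.
// Returns the DepType of the first violated dependency (end_marker if all
// still hold) and, if requested, fills in *failure_detail with a printable
// description of the first witness found.  When logging is enabled the
// scan continues so that all violations are reported.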
 630 Dependencies::DepType Dependencies::validate_dependencies(CompileTask* task, char** failure_detail) {
 631   int klass_violations = 0;
 632   DepType result = end_marker;
 633   for (Dependencies::DepStream deps(this); deps.next(); ) {
 634     Klass* witness = deps.check_dependency();
 635     if (witness != NULL) {
 636       if (klass_violations == 0) {
 637         result = deps.type();
        if (failure_detail != NULL) {
 639           // Use a fixed size buffer to prevent the string stream from
 640           // resizing in the context of an inner resource mark.
 641           char* buffer = NEW_RESOURCE_ARRAY(char, O_BUFLEN);
 642           stringStream st(buffer, O_BUFLEN);
 643           deps.print_dependency(witness, true, &st);
 644           *failure_detail = st.as_string();
 645         }
 646       }
 647       klass_violations++;
 648       if (xtty == NULL) {
 649         // If we're not logging then a single violation is sufficient,
 650         // otherwise we want to log all the dependences which were
 651         // violated.
 652         break;
 653       }
 654     }
 655   }
 656 
 657   return result;
 658 }
 659 
 660 // for the sake of the compiler log, print out current dependencies:
 661 void Dependencies::log_all_dependencies() {
 662   if (log() == NULL)  return;
 663   ResourceMark rm;
 664   for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
 665     DepType dept = (DepType)deptv;
 666     GrowableArray<ciBaseObject*>* deps = _deps[dept];
 667     int deplen = deps->length();
 668     if (deplen == 0) {
 669       continue;
 670     }
 671     int stride = dep_args(dept);
 672     GrowableArray<ciBaseObject*>* ciargs = new GrowableArray<ciBaseObject*>(stride);
 673     for (int i = 0; i < deps->length(); i += stride) {
 674       for (int j = 0; j < stride; j++) {
 675         // flush out the identities before printing
 676         ciargs->push(deps->at(i+j));
 677       }
 678       write_dependency_to(log(), dept, ciargs);
 679       ciargs->clear();
 680     }
    guarantee(deplen == deps->length(), "deps array cannot grow inside nested ResourceMark scope");
 682   }
 683 }
 684 
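// Write a single dependency (optionally with the witness that broke it) to
// the compile log, converting raw DepArgument values to ci mirrors first.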
 685 void Dependencies::write_dependency_to(CompileLog* log,
 686                                        DepType dept,
 687                                        GrowableArray<DepArgument>* args,
 688                                        Klass* witness) {
 689   if (log == NULL) {
 690     return;
 691   }
 692   ResourceMark rm;
 693   ciEnv* env = ciEnv::current();
 694   GrowableArray<ciBaseObject*>* ciargs = new GrowableArray<ciBaseObject*>(args->length());
 695   for (GrowableArrayIterator<DepArgument> it = args->begin(); it != args->end(); ++it) {
 696     DepArgument arg = *it;
 697     if (arg.is_oop()) {
 698       ciargs->push(env->get_object(arg.oop_value()));
 699     } else {
 700       ciargs->push(env->get_metadata(arg.metadata_value()));
 701     }
 702   }
 703   int argslen = ciargs->length();
 704   Dependencies::write_dependency_to(log, dept, ciargs, witness);
  guarantee(argslen == ciargs->length(), "ciargs array cannot grow inside nested ResourceMark scope");
 706 }
 707 
 708 void Dependencies::write_dependency_to(CompileLog* log,
 709                                        DepType dept,
 710                                        GrowableArray<ciBaseObject*>* args,
 711                                        Klass* witness) {
 712   if (log == NULL) {
 713     return;
 714   }
 715   ResourceMark rm;
 716   GrowableArray<int>* argids = new GrowableArray<int>(args->length());
 717   for (GrowableArrayIterator<ciBaseObject*> it = args->begin(); it != args->end(); ++it) {
 718     ciBaseObject* obj = *it;
 719     if (obj->is_object()) {
 720       argids->push(log->identify(obj->as_object()));
 721     } else {
 722       argids->push(log->identify(obj->as_metadata()));
 723     }
 724   }
 725   if (witness != NULL) {
 726     log->begin_elem("dependency_failed");
 727   } else {
 728     log->begin_elem("dependency");
 729   }
 730   log->print(" type='%s'", dep_name(dept));
 731   const int ctxkj = dep_context_arg(dept);  // -1 if no context arg
 732   if (ctxkj >= 0 && ctxkj < argids->length()) {
 733     log->print(" ctxk='%d'", argids->at(ctxkj));
 734   }
 735   // write remaining arguments, if any.
 736   for (int j = 0; j < argids->length(); j++) {
 737     if (j == ctxkj)  continue;  // already logged
 738     if (j == 1) {
 739       log->print(  " x='%d'",    argids->at(j));
 740     } else {
 741       log->print(" x%d='%d'", j, argids->at(j));
 742     }
 743   }
 744   if (witness != NULL) {
 745     log->object("witness", witness);
 746     log->stamp();
 747   }
 748   log->end_elem();
 749 }
 750 
 751 void Dependencies::write_dependency_to(xmlStream* xtty,
 752                                        DepType dept,
 753                                        GrowableArray<DepArgument>* args,
 754                                        Klass* witness) {
 755   if (xtty == NULL) {
 756     return;
 757   }
 758   Thread* thread = Thread::current();
 759   HandleMark rm(thread);
 760   ttyLocker ttyl;
 761   int ctxkj = dep_context_arg(dept);  // -1 if no context arg
 762   if (witness != NULL) {
 763     xtty->begin_elem("dependency_failed");
 764   } else {
 765     xtty->begin_elem("dependency");
 766   }
 767   xtty->print(" type='%s'", dep_name(dept));
 768   if (ctxkj >= 0) {
 769     xtty->object("ctxk", args->at(ctxkj).metadata_value());
 770   }
 771   // write remaining arguments, if any.
 772   for (int j = 0; j < args->length(); j++) {
 773     if (j == ctxkj)  continue;  // already logged
 774     DepArgument arg = args->at(j);
 775     if (j == 1) {
 776       if (arg.is_oop()) {
 777         xtty->object("x", Handle(thread, arg.oop_value()));
 778       } else {
 779         xtty->object("x", arg.metadata_value());
 780       }
 781     } else {
 782       char xn[12]; sprintf(xn, "x%d", j);
 783       if (arg.is_oop()) {
 784         xtty->object(xn, Handle(thread, arg.oop_value()));
 785       } else {
 786         xtty->object(xn, arg.metadata_value());
 787       }
 788     }
 789   }
 790   if (witness != NULL) {
 791     xtty->object("witness", witness);
 792     xtty->stamp();
 793   }
 794   xtty->end_elem();
 795 }
 796 
 797 void Dependencies::print_dependency(DepType dept, GrowableArray<DepArgument>* args,
 798                                     Klass* witness, outputStream* st) {
 799   ResourceMark rm;
 800   ttyLocker ttyl;   // keep the following output all in one block
 801   st->print_cr("%s of type %s",
 802                 (witness == NULL)? "Dependency": "Failed dependency",
 803                 dep_name(dept));
 804   // print arguments
 805   int ctxkj = dep_context_arg(dept);  // -1 if no context arg
 806   for (int j = 0; j < args->length(); j++) {
 807     DepArgument arg = args->at(j);
 808     bool put_star = false;
 809     if (arg.is_null())  continue;
 810     const char* what;
 811     if (j == ctxkj) {
 812       assert(arg.is_metadata(), "must be");
 813       what = "context";
 814       put_star = !Dependencies::is_concrete_klass((Klass*)arg.metadata_value());
 815     } else if (arg.is_method()) {
 816       what = "method ";
 817       put_star = !Dependencies::is_concrete_method((Method*)arg.metadata_value(), NULL);
 818     } else if (arg.is_klass()) {
 819       what = "class  ";
 820     } else {
 821       what = "object ";
 822     }
 823     st->print("  %s = %s", what, (put_star? "*": ""));
 824     if (arg.is_klass()) {
 825       st->print("%s", ((Klass*)arg.metadata_value())->external_name());
 826     } else if (arg.is_method()) {
 827       ((Method*)arg.metadata_value())->print_value_on(st);
 828     } else if (arg.is_oop()) {
 829       arg.oop_value()->print_value_on(st);
 830     } else {
 831       ShouldNotReachHere(); // Provide impl for this type.
 832     }
 833 
 834     st->cr();
 835   }
 836   if (witness != NULL) {
 837     bool put_star = !Dependencies::is_concrete_klass(witness);
 838     st->print_cr("  witness = %s%s",
 839                   (put_star? "*": ""),
 840                   witness->external_name());
 841   }
 842 }
 843 
 844 void Dependencies::DepStream::log_dependency(Klass* witness) {
 845   if (_deps == NULL && xtty == NULL)  return;  // fast cutout for runtime
 846   ResourceMark rm;
 847   const int nargs = argument_count();
 848   GrowableArray<DepArgument>* args = new GrowableArray<DepArgument>(nargs);
 849   for (int j = 0; j < nargs; j++) {
 850     if (is_oop_argument(j)) {
 851       args->push(argument_oop(j));
 852     } else {
 853       args->push(argument(j));
 854     }
 855   }
 856   int argslen = args->length();
 857   if (_deps != NULL && _deps->log() != NULL) {
 858     if (ciEnv::current() != NULL) {
 859       Dependencies::write_dependency_to(_deps->log(), type(), args, witness);
 860     } else {
 861       // Treat the CompileLog as an xmlstream instead
 862       Dependencies::write_dependency_to((xmlStream*)_deps->log(), type(), args, witness);
 863     }
 864   } else {
 865     Dependencies::write_dependency_to(xtty, type(), args, witness);
 866   }
  guarantee(argslen == args->length(), "args array cannot grow inside nested ResourceMark scope");
 868 }
 869 
 870 void Dependencies::DepStream::print_dependency(Klass* witness, bool verbose, outputStream* st) {
 871   ResourceMark rm;
 872   int nargs = argument_count();
 873   GrowableArray<DepArgument>* args = new GrowableArray<DepArgument>(nargs);
 874   for (int j = 0; j < nargs; j++) {
 875     if (is_oop_argument(j)) {
 876       args->push(argument_oop(j));
 877     } else {
 878       args->push(argument(j));
 879     }
 880   }
 881   int argslen = args->length();
 882   Dependencies::print_dependency(type(), args, witness, st);
 883   if (verbose) {
 884     if (_code != NULL) {
 885       st->print("  code: ");
 886       _code->print_value_on(st);
 887       st->cr();
 888     }
 889   }
  guarantee(argslen == args->length(), "args array cannot grow inside nested ResourceMark scope");
 891 }
 892 
 893 
 894 /// Dependency stream support (decodes dependencies from an nmethod):
 895 
 896 #ifdef ASSERT
 897 void Dependencies::DepStream::initial_asserts(size_t byte_limit) {
 898   assert(must_be_in_vm(), "raw oops here");
 899   _byte_limit = byte_limit;
 900   _type       = (DepType)(end_marker-1);  // defeat "already at end" assert
 901   assert((_code!=NULL) + (_deps!=NULL) == 1, "one or t'other");
 902 }
 903 #endif //ASSERT
 904 
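// Decode the next dependency record from the stream.  Reads the type byte,
// strips default_context_type_bit, and then reads one recorded index per
// argument; a context argument that was omitted during encoding is stored
// as index 0 and reconstructed on demand in argument().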
 905 bool Dependencies::DepStream::next() {
 906   assert(_type != end_marker, "already at end");
 907   if (_bytes.position() == 0 && _code != NULL
 908       && _code->dependencies_size() == 0) {
 909     // Method has no dependencies at all.
 910     return false;
 911   }
 912   int code_byte = (_bytes.read_byte() & 0xFF);
 913   if (code_byte == end_marker) {
 914     DEBUG_ONLY(_type = end_marker);
 915     return false;
 916   } else {
 917     int ctxk_bit = (code_byte & Dependencies::default_context_type_bit);
 918     code_byte -= ctxk_bit;
 919     DepType dept = (DepType)code_byte;
 920     _type = dept;
 921     Dependencies::check_valid_dependency_type(dept);
 922     int stride = _dep_args[dept];
 923     assert(stride == dep_args(dept), "sanity");
 924     int skipj = -1;
 925     if (ctxk_bit != 0) {
 926       skipj = 0;  // currently the only context argument is at zero
 927       assert(skipj == dep_context_arg(dept), "zero arg always ctxk");
 928     }
 929     for (int j = 0; j < stride; j++) {
 930       _xi[j] = (j == skipj)? 0: _bytes.read_int();
 931     }
 932     DEBUG_ONLY(_xi[stride] = -1);   // help detect overruns
 933     return true;
 934   }
 935 }
 936 
 937 inline Metadata* Dependencies::DepStream::recorded_metadata_at(int i) {
 938   Metadata* o = NULL;
 939   if (_code != NULL) {
 940     o = _code->metadata_at(i);
 941   } else {
 942     o = _deps->oop_recorder()->metadata_at(i);
 943   }
 944   return o;
 945 }
 946 
 947 inline oop Dependencies::DepStream::recorded_oop_at(int i) {
 948   return (_code != NULL)
 949          ? _code->oop_at(i)
 950     : JNIHandles::resolve(_deps->oop_recorder()->oop_at(i));
 951 }
 952 
 953 Metadata* Dependencies::DepStream::argument(int i) {
 954   Metadata* result = recorded_metadata_at(argument_index(i));
 955 
 956   if (result == NULL) { // Explicit context argument can be compressed
 957     int ctxkj = dep_context_arg(type());  // -1 if no explicit context arg
 958     if (ctxkj >= 0 && i == ctxkj && ctxkj+1 < argument_count()) {
 959       result = ctxk_encoded_as_null(type(), argument(ctxkj+1));
 960     }
 961   }
 962 
 963   assert(result == NULL || result->is_klass() || result->is_method(), "must be");
 964   return result;
 965 }
 966 
 967 /**
 968  * Returns a unique identifier for each dependency argument.
 969  */
 970 uintptr_t Dependencies::DepStream::get_identifier(int i) {
 971   if (is_oop_argument(i)) {
 972     return (uintptr_t)(oopDesc*)argument_oop(i);
 973   } else {
 974     return (uintptr_t)argument(i);
 975   }
 976 }
 977 
 978 oop Dependencies::DepStream::argument_oop(int i) {
 979   oop result = recorded_oop_at(argument_index(i));
 980   assert(oopDesc::is_oop_or_null(result), "must be");
 981   return result;
 982 }
 983 
 984 Klass* Dependencies::DepStream::context_type() {
 985   assert(must_be_in_vm(), "raw oops here");
 986 
 987   // Most dependencies have an explicit context type argument.
 988   {
 989     int ctxkj = dep_context_arg(type());  // -1 if no explicit context arg
 990     if (ctxkj >= 0) {
 991       Metadata* k = argument(ctxkj);
 992       assert(k != NULL && k->is_klass(), "type check");
 993       return (Klass*)k;
 994     }
 995   }
 996 
  // Some dependencies use the klass of the first object
  // argument as the implicit context type.
 999   {
1000     int ctxkj = dep_implicit_context_arg(type());
1001     if (ctxkj >= 0) {
1002       Klass* k = argument_oop(ctxkj)->klass();
1003       assert(k != NULL && k->is_klass(), "type check");
1004       return (Klass*) k;
1005     }
1006   }
1007 
1008   // And some dependencies don't have a context type at all,
1009   // e.g. evol_method.
1010   return NULL;
1011 }
1012 
1013 // ----------------- DependencySignature --------------------------------------
1014 bool DependencySignature::equals(DependencySignature const& s1, DependencySignature const& s2) {
1015   if ((s1.type() != s2.type()) || (s1.args_count() != s2.args_count())) {
1016     return false;
1017   }
1018 
1019   for (int i = 0; i < s1.args_count(); i++) {
1020     if (s1.arg(i) != s2.arg(i)) {
1021       return false;
1022     }
1023   }
1024   return true;
1025 }
1026 
1027 /// Checking dependencies:
1028 
1029 // This hierarchy walker inspects subtypes of a given type,
1030 // trying to find a "bad" class which breaks a dependency.
1031 // Such a class is called a "witness" to the broken dependency.
1032 // While searching around, we ignore "participants", which
1033 // are already known to the dependency.
1034 class ClassHierarchyWalker {
1035  public:
1036   enum { PARTICIPANT_LIMIT = 3 };
1037 
1038  private:
1039   // optional method descriptor to check for:
1040   Symbol* _name;
1041   Symbol* _signature;
1042 
1043   // special classes which are not allowed to be witnesses:
1044   Klass*    _participants[PARTICIPANT_LIMIT+1];
1045   int       _num_participants;
1046 
1047   // cache of method lookups
1048   Method* _found_methods[PARTICIPANT_LIMIT+1];
1049 
1050   // if non-zero, tells how many witnesses to convert to participants
1051   int       _record_witnesses;
1052 
1053   void initialize(Klass* participant) {
1054     _record_witnesses = 0;
1055     _participants[0]  = participant;
1056     _found_methods[0] = NULL;
1057     _num_participants = 0;
1058     if (participant != NULL) {
1059       // Terminating NULL.
1060       _participants[1] = NULL;
1061       _found_methods[1] = NULL;
1062       _num_participants = 1;
1063     }
1064   }
1065 
1066   void initialize_from_method(Method* m) {
1067     assert(m != NULL && m->is_method(), "sanity");
1068     _name      = m->name();
1069     _signature = m->signature();
1070   }
1071 
1072  public:
1073   // The walker is initialized to recognize certain methods and/or types
1074   // as friendly participants.
1075   ClassHierarchyWalker(Klass* participant, Method* m) {
1076     initialize_from_method(m);
1077     initialize(participant);
1078   }
1079   ClassHierarchyWalker(Method* m) {
1080     initialize_from_method(m);
1081     initialize(NULL);
1082   }
1083   ClassHierarchyWalker(Klass* participant = NULL) {
1084     _name      = NULL;
1085     _signature = NULL;
1086     initialize(participant);
1087   }
1088   ClassHierarchyWalker(Klass* participants[], int num_participants) {
1089     _name      = NULL;
1090     _signature = NULL;
1091     initialize(NULL);
1092     for (int i = 0; i < num_participants; ++i) {
1093       add_participant(participants[i]);
1094     }
1095   }
1096 
1097   // This is common code for two searches:  One for concrete subtypes,
1098   // the other for concrete method implementations and overrides.
1099   bool doing_subtype_search() {
1100     return _name == NULL;
1101   }
1102 
1103   int num_participants() { return _num_participants; }
1104   Klass* participant(int n) {
1105     assert((uint)n <= (uint)_num_participants, "oob");
1106     return _participants[n];
1107   }
1108 
1109   // Note:  If n==num_participants, returns NULL.
1110   Method* found_method(int n) {
1111     assert((uint)n <= (uint)_num_participants, "oob");
1112     Method* fm = _found_methods[n];
1113     assert(n == _num_participants || fm != NULL, "proper usage");
1114     if (fm != NULL && fm->method_holder() != _participants[n]) {
1115       // Default methods from interfaces can be added to classes. In
1116       // that case the holder of the method is not the class but the
1117       // interface where it's defined.
1118       assert(fm->is_default_method(), "sanity");
1119       return NULL;
1120     }
1121     return fm;
1122   }
1123 
1124 #ifdef ASSERT
1125   // Assert that m is inherited into ctxk, without intervening overrides.
1126   // (May return true even if this is not true, in corner cases where we punt.)
1127   bool check_method_context(Klass* ctxk, Method* m) {
1128     if (m->method_holder() == ctxk)
1129       return true;  // Quick win.
1130     if (m->is_private())
1131       return false; // Quick lose.  Should not happen.
1132     if (!(m->is_public() || m->is_protected()))
1133       // The override story is complex when packages get involved.
1134       return true;  // Must punt the assertion to true.
1135     Method* lm = ctxk->lookup_method(m->name(), m->signature());
1136     if (lm == NULL && ctxk->is_instance_klass()) {
1137       // It might be an interface method
1138       lm = InstanceKlass::cast(ctxk)->lookup_method_in_ordered_interfaces(m->name(),
1139                                                                           m->signature());
1140     }
1141     if (lm == m)
1142       // Method m is inherited into ctxk.
1143       return true;
1144     if (lm != NULL) {
1145       if (!(lm->is_public() || lm->is_protected())) {
1146         // Method is [package-]private, so the override story is complex.
1147         return true;  // Must punt the assertion to true.
1148       }
1149       if (lm->is_static()) {
1150         // Static methods don't override non-static so punt
1151         return true;
1152       }
1153       if (!Dependencies::is_concrete_method(lm, ctxk) &&
1154           !Dependencies::is_concrete_method(m, ctxk)) {
1155         // They are both non-concrete
1156         if (lm->method_holder()->is_subtype_of(m->method_holder())) {
1157           // Method m is overridden by lm, but both are non-concrete.
1158           return true;
1159         }
1160         if (lm->method_holder()->is_interface() && m->method_holder()->is_interface() &&
1161             ctxk->is_subtype_of(m->method_holder()) && ctxk->is_subtype_of(lm->method_holder())) {
1162           // Interface method defined in multiple super interfaces
1163           return true;
1164         }
1165       }
1166     }
1167     ResourceMark rm;
1168     tty->print_cr("Dependency method not found in the associated context:");
1169     tty->print_cr("  context = %s", ctxk->external_name());
1170     tty->print(   "  method = "); m->print_short_name(tty); tty->cr();
1171     if (lm != NULL) {
1172       tty->print( "  found = "); lm->print_short_name(tty); tty->cr();
1173     }
1174     return false;
1175   }
1176 #endif
1177 
1178   void add_participant(Klass* participant) {
1179     assert(_num_participants + _record_witnesses < PARTICIPANT_LIMIT, "oob");
1180     int np = _num_participants++;
1181     _participants[np] = participant;
1182     _participants[np+1] = NULL;
1183     _found_methods[np+1] = NULL;
1184   }
1185 
1186   void record_witnesses(int add) {
1187     if (add > PARTICIPANT_LIMIT)  add = PARTICIPANT_LIMIT;
1188     assert(_num_participants + add < PARTICIPANT_LIMIT, "oob");
1189     _record_witnesses = add;
1190   }
1191 
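  // Decide whether klass k breaks the dependency being checked.  For a
  // subtype search any concrete class is a witness; for a method search we
  // look for a concrete implementation (or a problematic re-abstraction) of
  // _name/_signature in k, including inherited interface default methods.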
1192   bool is_witness(Klass* k) {
1193     if (doing_subtype_search()) {
1194       return Dependencies::is_concrete_klass(k);
1195     } else if (!k->is_instance_klass()) {
1196       return false; // no methods to find in an array type
1197     } else {
1198       // Search class hierarchy first, skipping private implementations
1199       // as they never override any inherited methods
1200       Method* m = InstanceKlass::cast(k)->find_instance_method(_name, _signature, Klass::skip_private);
1201       if (!Dependencies::is_concrete_method(m, k)) {
1202         // Check for re-abstraction of method
1203         if (!k->is_interface() && m != NULL && m->is_abstract()) {
1204           // Found a matching abstract method 'm' in the class hierarchy.
          // This is fine iff 'k' is an abstract class and all concrete subtypes
          // of 'k' override 'm' and are participants of the current search.
1207           ClassHierarchyWalker wf(_participants, _num_participants);
1208           Klass* w = wf.find_witness_subtype(k);
1209           if (w != NULL) {
1210             Method* wm = InstanceKlass::cast(w)->find_instance_method(_name, _signature, Klass::skip_private);
1211             if (!Dependencies::is_concrete_method(wm, w)) {
1212               // Found a concrete subtype 'w' which does not override abstract method 'm'.
1213               // Bail out because 'm' could be called with 'w' as receiver (leading to an
1214               // AbstractMethodError) and thus the method we are looking for is not unique.
1215               _found_methods[_num_participants] = m;
1216               return true;
1217             }
1218           }
1219         }
1220         // Check interface defaults also, if any exist.
1221         Array<Method*>* default_methods = InstanceKlass::cast(k)->default_methods();
1222         if (default_methods == NULL)
1223             return false;
1224         m = InstanceKlass::cast(k)->find_method(default_methods, _name, _signature);
1225         if (!Dependencies::is_concrete_method(m, NULL))
1226             return false;
1227       }
1228       _found_methods[_num_participants] = m;
1229       // Note:  If add_participant(k) is called,
1230       // the method m will already be memoized for it.
1231       return true;
1232     }
1233   }
1234 
1235   bool is_participant(Klass* k) {
1236     if (k == _participants[0]) {
1237       return true;
1238     } else if (_num_participants <= 1) {
1239       return false;
1240     } else {
1241       return in_list(k, &_participants[1]);
1242     }
1243   }
1244   bool ignore_witness(Klass* witness) {
1245     if (_record_witnesses == 0) {
1246       return false;
1247     } else {
1248       --_record_witnesses;
1249       add_participant(witness);
1250       return true;
1251     }
1252   }
1253   static bool in_list(Klass* x, Klass** list) {
1254     for (int i = 0; ; i++) {
1255       Klass* y = list[i];
1256       if (y == NULL)  break;
1257       if (y == x)  return true;
1258     }
1259     return false;  // not in list
1260   }
1261 
1262  private:
1263   // the actual search method:
1264   Klass* find_witness_anywhere(Klass* context_type,
1265                                  bool participants_hide_witnesses,
1266                                  bool top_level_call = true);
1267   // the spot-checking version:
1268   Klass* find_witness_in(KlassDepChange& changes,
1269                          Klass* context_type,
1270                            bool participants_hide_witnesses);
1271  public:
1272   Klass* find_witness_subtype(Klass* context_type, KlassDepChange* changes = NULL) {
1273     assert(doing_subtype_search(), "must set up a subtype search");
1274     // When looking for unexpected concrete types,
1275     // do not look beneath expected ones.
1276     const bool participants_hide_witnesses = true;
1277     // CX > CC > C' is OK, even if C' is new.
1278     // CX > { CC,  C' } is not OK if C' is new, and C' is the witness.
1279     if (changes != NULL) {
1280       return find_witness_in(*changes, context_type, participants_hide_witnesses);
1281     } else {
1282       return find_witness_anywhere(context_type, participants_hide_witnesses);
1283     }
1284   }
1285   Klass* find_witness_definer(Klass* context_type, KlassDepChange* changes = NULL) {
1286     assert(!doing_subtype_search(), "must set up a method definer search");
1287     // When looking for unexpected concrete methods,
1288     // look beneath expected ones, to see if there are overrides.
1289     const bool participants_hide_witnesses = true;
1290     // CX.m > CC.m > C'.m is not OK, if C'.m is new, and C' is the witness.
1291     if (changes != NULL) {
1292       return find_witness_in(*changes, context_type, !participants_hide_witnesses);
1293     } else {
1294       return find_witness_anywhere(context_type, !participants_hide_witnesses);
1295     }
1296   }
1297 };
1298 
1299 #ifndef PRODUCT
1300 static int deps_find_witness_calls = 0;
1301 static int deps_find_witness_steps = 0;
1302 static int deps_find_witness_recursions = 0;
1303 static int deps_find_witness_singles = 0;
1304 static int deps_find_witness_print = 0; // set to -1 to force a final print
1305 static bool count_find_witness_calls() {
1306   if (TraceDependencies || LogCompilation) {
1307     int pcount = deps_find_witness_print + 1;
1308     bool final_stats      = (pcount == 0);
1309     bool initial_call     = (pcount == 1);
1310     bool occasional_print = ((pcount & ((1<<10) - 1)) == 0);
1311     if (pcount < 0)  pcount = 1; // crude overflow protection
1312     deps_find_witness_print = pcount;
1313     if (VerifyDependencies && initial_call) {
1314       tty->print_cr("Warning:  TraceDependencies results may be inflated by VerifyDependencies");
1315     }
1316     if (occasional_print || final_stats) {
1317       // Every now and then dump a little info about dependency searching.
1318       if (xtty != NULL) {
1319        ttyLocker ttyl;
1320        xtty->elem("deps_find_witness calls='%d' steps='%d' recursions='%d' singles='%d'",
1321                    deps_find_witness_calls,
1322                    deps_find_witness_steps,
1323                    deps_find_witness_recursions,
1324                    deps_find_witness_singles);
1325       }
1326       if (final_stats || (TraceDependencies && WizardMode)) {
1327         ttyLocker ttyl;
1328         tty->print_cr("Dependency check (find_witness) "
1329                       "calls=%d, steps=%d (avg=%.1f), recursions=%d, singles=%d",
1330                       deps_find_witness_calls,
1331                       deps_find_witness_steps,
1332                       (double)deps_find_witness_steps / deps_find_witness_calls,
1333                       deps_find_witness_recursions,
1334                       deps_find_witness_singles);
1335       }
1336     }
1337     return true;
1338   }
1339   return false;
1340 }
1341 #else
1342 #define count_find_witness_calls() (0)
1343 #endif //PRODUCT
1344 
1345 
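// The spot-checking search: instead of walking the whole sub-hierarchy
// under context_type, test only the single newly loaded type carried by
// 'changes' against this dependency's participants.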
1346 Klass* ClassHierarchyWalker::find_witness_in(KlassDepChange& changes,
1347                                                Klass* context_type,
1348                                                bool participants_hide_witnesses) {
1349   assert(changes.involves_context(context_type), "irrelevant dependency");
1350   Klass* new_type = changes.new_type();
1351 
1352   (void)count_find_witness_calls();
1353   NOT_PRODUCT(deps_find_witness_singles++);
1354 
1355   // Current thread must be in VM (not native mode, as in CI):
1356   assert(must_be_in_vm(), "raw oops here");
1357   // Must not move the class hierarchy during this check:
1358   assert_locked_or_safepoint(Compile_lock);
1359 
1360   int nof_impls = InstanceKlass::cast(context_type)->nof_implementors();
1361   if (nof_impls > 1) {
1362     // Avoid this case: *I.m > { A.m, C }; B.m > C
1363     // %%% Until this is fixed more systematically, bail out.
1364     // See corresponding comment in find_witness_anywhere.
1365     return context_type;
1366   }
1367 
1368   assert(!is_participant(new_type), "only old classes are participants");
1369   if (participants_hide_witnesses) {
1370     // If the new type is a subtype of a participant, we are done.
1371     for (int i = 0; i < num_participants(); i++) {
1372       Klass* part = participant(i);
1373       if (part == NULL)  continue;
1374       assert(changes.involves_context(part) == new_type->is_subtype_of(part),
1375              "correct marking of participants, b/c new_type is unique");
1376       if (changes.involves_context(part)) {
1377         // new guy is protected from this check by previous participant
1378         return NULL;
1379       }
1380     }
1381   }
1382 
1383   if (is_witness(new_type) &&
1384       !ignore_witness(new_type)) {
1385     return new_type;
1386   }
1387 
1388   return NULL;
1389 }
1390 
1391 
1392 // Walk hierarchy under a context type, looking for unexpected types.
1393 // Do not report participant types, and recursively walk beneath
1394 // them only if participants_hide_witnesses is false.
1395 // If top_level_call is false, skip testing the context type,
1396 // because the caller has already considered it.
1397 Klass* ClassHierarchyWalker::find_witness_anywhere(Klass* context_type,
1398                                                      bool participants_hide_witnesses,
1399                                                      bool top_level_call) {
1400   // Current thread must be in VM (not native mode, as in CI):
1401   assert(must_be_in_vm(), "raw oops here");
1402   // Must not move the class hierarchy during this check:
1403   assert_locked_or_safepoint(Compile_lock);
1404 
1405   bool do_counts = count_find_witness_calls();
1406 
1407   // Check the root of the sub-hierarchy first.
1408   if (top_level_call) {
1409     if (do_counts) {
1410       NOT_PRODUCT(deps_find_witness_calls++);
1411       NOT_PRODUCT(deps_find_witness_steps++);
1412     }
1413     if (is_participant(context_type)) {
1414       if (participants_hide_witnesses)  return NULL;
1415       // else fall through to search loop...
1416     } else if (is_witness(context_type) && !ignore_witness(context_type)) {
1417       // The context is an abstract class or interface, to start with.
1418       return context_type;
1419     }
1420   }
1421 
1422   // Now we must check each implementor and each subclass.
1423   // Use a short worklist to avoid blowing the stack.
1424   // Each worklist entry is a *chain* of subklass siblings to process.
1425   const int CHAINMAX = 100;  // >= 1 + InstanceKlass::implementors_limit
1426   Klass* chains[CHAINMAX];
1427   int    chaini = 0;  // index into worklist
1428   Klass* chain;       // scratch variable
1429 #define ADD_SUBCLASS_CHAIN(k)                     {  \
1430     assert(chaini < CHAINMAX, "oob");                \
1431     chain = k->subklass();                           \
1432     if (chain != NULL)  chains[chaini++] = chain;    }
1433 
1434   // Look for non-abstract subclasses.
1435   // (Note:  Interfaces do not have subclasses.)
1436   ADD_SUBCLASS_CHAIN(context_type);
1437 
1438   // If it is an interface, search its direct implementors.
1439   // (Their subclasses are additional indirect implementors.
1440   // See InstanceKlass::add_implementor.)
1441   // (Note:  nof_implementors is always zero for non-interfaces.)
1442   if (top_level_call) {
1443     int nof_impls = InstanceKlass::cast(context_type)->nof_implementors();
1444     if (nof_impls > 1) {
1445       // Avoid this case: *I.m > { A.m, C }; B.m > C
1446       // Here, I.m has 2 concrete implementations, but m appears unique
1447       // as A.m, because the search misses B.m when checking C.
1448       // The inherited method B.m was getting missed by the walker
1449       // when interface 'I' was the starting point.
1450       // %%% Until this is fixed more systematically, bail out.
1451       // (Old CHA had the same limitation.)
1452       return context_type;
1453     }
1454     if (nof_impls > 0) {
1455       Klass* impl = InstanceKlass::cast(context_type)->implementor();
1456       assert(impl != NULL, "just checking");
1457       // If impl is the same as the context_type, then more than one
1458       // implementor has been seen. No exact info in this case.
1459       if (impl == context_type) {
1460         return context_type;  // report an inexact witness to this sad affair
1461       }
1462       if (do_counts)
1463         { NOT_PRODUCT(deps_find_witness_steps++); }
1464       if (is_participant(impl)) {
1465         if (!participants_hide_witnesses) {
1466           ADD_SUBCLASS_CHAIN(impl);
1467         }
1468       } else if (is_witness(impl) && !ignore_witness(impl)) {
1469         return impl;
1470       } else {
1471         ADD_SUBCLASS_CHAIN(impl);
1472       }
1473     }
1474   }
1475 
1476   // Recursively process each non-trivial sibling chain.
1477   while (chaini > 0) {
1478     Klass* chain = chains[--chaini];
1479     for (Klass* sub = chain; sub != NULL; sub = sub->next_sibling()) {
1480       if (do_counts) { NOT_PRODUCT(deps_find_witness_steps++); }
1481       if (is_participant(sub)) {
1482         if (participants_hide_witnesses)  continue;
1483         // else fall through to process this guy's subclasses
1484       } else if (is_witness(sub) && !ignore_witness(sub)) {
1485         return sub;
1486       }
1487       if (chaini < (VerifyDependencies? 2: CHAINMAX)) {
1488         // Fast path.  (Partially disabled if VerifyDependencies.)
1489         ADD_SUBCLASS_CHAIN(sub);
1490       } else {
1491         // Worklist overflow.  Do a recursive call.  Should be rare.
1492         // The recursive call will have its own worklist, of course.
1493         // (Note that sub has already been tested, so that there is
1494         // no need for the recursive call to re-test.  That's handy,
1495         // since the recursive call sees sub as the context_type.)
1496         if (do_counts) { NOT_PRODUCT(deps_find_witness_recursions++); }
1497         Klass* witness = find_witness_anywhere(sub,
1498                                                  participants_hide_witnesses,
1499                                                  /*top_level_call=*/ false);
1500         if (witness != NULL)  return witness;
1501       }
1502     }
1503   }
1504 
1505   // No witness found.  The dependency remains unbroken.
1506   return NULL;
1507 #undef ADD_SUBCLASS_CHAIN
1508 }
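// Illustration only: for an abstract context type AC with two concrete
// subclasses C1 and C2, a walker whose single participant is C1 (as in
// check_abstract_with_unique_concrete_subtype below) skips C1 and, with
// participants_hide_witnesses, everything beneath it, but reports C2 as a
// witness, since a second concrete type contradicts the assertion that C1
// is AC's unique concrete subtype.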
1509 
1510 
1511 bool Dependencies::is_concrete_klass(Klass* k) {
1512   if (k->is_abstract())  return false;
1513   // %%% We could treat classes which are concrete but
1514   // have not yet been instantiated as virtually abstract.
1515   // This would require a deoptimization barrier on first instantiation.
1516   //if (k->is_not_instantiated())  return false;
1517   return true;
1518 }
1519 
1520 bool Dependencies::is_concrete_method(Method* m, Klass* k) {
1521   // NULL is not a concrete method,
1522   // statics are irrelevant to virtual call sites,
1523   // abstract methods are not concrete,
1524   // overpass (error) methods are not concrete if k is abstract.
1525   //
1526   // Note that "true" is the conservative answer: if k == NULL, the
1527   //     overpass clause is false, so we return true whenever the
1528   //     answer would otherwise depend on the overpass clause.
1529   return !(m == NULL || m->is_static() || m->is_abstract() ||
1530            (m->is_overpass() && k != NULL && k->is_abstract()));
1531 }
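// (An "overpass" is a method the VM synthesizes during default-method
// processing, typically a bridge that throws an error such as
// AbstractMethodError when interface method resolution is ambiguous or
// missing; that is why the comment above calls them "error" methods.)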
1532 
1533 
1534 Klass* Dependencies::find_finalizable_subclass(Klass* k) {
1535   if (k->is_interface())  return NULL;
1536   if (k->has_finalizer()) return k;
1537   k = k->subklass();
1538   while (k != NULL) {
1539     Klass* result = find_finalizable_subclass(k);
1540     if (result != NULL) return result;
1541     k = k->next_sibling();
1542   }
1543   return NULL;
1544 }
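// (A depth-first search of the subclass tree rooted at k; it returns the
// first type found whose has_finalizer() flag is set, or NULL if none.)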
1545 
1546 
1547 bool Dependencies::is_concrete_klass(ciInstanceKlass* k) {
1548   if (k->is_abstract())  return false;
1549   // We could also return false if k does not yet appear to be
1550   // instantiated, provided the VM supports that distinction as well.
1551   //if (k->is_not_instantiated())  return false;
1552   return true;
1553 }
1554 
1555 bool Dependencies::has_finalizable_subclass(ciInstanceKlass* k) {
1556   return k->has_finalizable_subclass();
1557 }
1558 
1559 
1560 // Any use of the contents (bytecodes) of a method must be
1561 // marked by an "evol_method" dependency, if those contents
1562 // can change.  (Note: A method is always dependent on itself.)
1563 Klass* Dependencies::check_evol_method(Method* m) {
1564   assert(must_be_in_vm(), "raw oops here");
1565   // Did somebody do a JVMTI RedefineClasses while our backs were turned?
1566   // Or is there now a breakpoint?
1567   // (Assumes compiled code cannot handle bkpts; change if UseFastBreakpoints.)
1568   if (m->is_old()
1569       || m->number_of_breakpoints() > 0) {
1570     return m->method_holder();
1571   } else {
1572     return NULL;
1573   }
1574 }
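// (As with the other check_ routines in this file, a non-NULL result is a
// "witness" that invalidates the assertion, and NULL means the assertion
// still holds.)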
1575 
1576 // This is a strong assertion:  It is that the given type
1577 // has no subtypes whatever.  It is most useful for
1578 // optimizing checks on reflected types or on array types.
1579 // (Checks on types which are derived from real instances
1580 // can be optimized more strongly than this, because we
1581 // know that the checked type comes from a concrete type,
1582 // and therefore we can disregard abstract types.)
1583 Klass* Dependencies::check_leaf_type(Klass* ctxk) {
1584   assert(must_be_in_vm(), "raw oops here");
1585   assert_locked_or_safepoint(Compile_lock);
1586   InstanceKlass* ctx = InstanceKlass::cast(ctxk);
1587   Klass* sub = ctx->subklass();
1588   if (sub != NULL) {
1589     return sub;
1590   } else if (ctx->nof_implementors() != 0) {
1591     // If ctxk is an interface, it must be unimplemented to be a leaf.
1592     // (If it is not an interface, nof_implementors is always zero.)
1593     Klass* impl = ctx->implementor();
1594     assert(impl != NULL, "must be set");
1595     return impl;
1596   } else {
1597     return NULL;
1598   }
1599 }
1600 
1601 // Test the assertion that conck is the only concrete subtype* of ctxk.
1602 // The type conck itself is allowed to have further concrete subtypes.
1603 // This allows the compiler to narrow occurrences of ctxk by conck,
1604 // when dealing with the types of actual instances.
1605 Klass* Dependencies::check_abstract_with_unique_concrete_subtype(Klass* ctxk,
1606                                                                    Klass* conck,
1607                                                                    KlassDepChange* changes) {
1608   ClassHierarchyWalker wf(conck);
1609   return wf.find_witness_subtype(ctxk, changes);
1610 }
1611 
1612 // If a non-concrete class has no concrete subtypes, it is not (yet)
1613 // instantiatable.  This can allow the compiler to make some paths go
1614 // dead, if they are gated by a test of the type.
1615 Klass* Dependencies::check_abstract_with_no_concrete_subtype(Klass* ctxk,
1616                                                                KlassDepChange* changes) {
1617   // Find any concrete subtype, with no participants:
1618   ClassHierarchyWalker wf;
1619   return wf.find_witness_subtype(ctxk, changes);
1620 }
1621 
1622 
1623 // If a concrete class has no concrete subtypes, it can always be
1624 // exactly typed.  This allows the use of a cheaper type test.
1625 Klass* Dependencies::check_concrete_with_no_concrete_subtype(Klass* ctxk,
1626                                                                KlassDepChange* changes) {
1627   // Find any concrete subtype, with only the ctxk as participant:
1628   ClassHierarchyWalker wf(ctxk);
1629   return wf.find_witness_subtype(ctxk, changes);
1630 }
1631 
1632 
1633 // Find the unique concrete proper subtype of ctxk, or NULL if there
1634 // is more than one concrete proper subtype.  If there are no concrete
1635 // proper subtypes, return ctxk itself, whether it is concrete or not.
1636 // The returned subtype is allowed to have further concrete subtypes.
1637 // That is, return CC1 for CX > CC1 > CC2, but NULL for CX > { CC1, CC2 }.
1638 Klass* Dependencies::find_unique_concrete_subtype(Klass* ctxk) {
1639   ClassHierarchyWalker wf(ctxk);   // Ignore ctxk when walking.
1640   wf.record_witnesses(1);          // Record one other witness when walking.
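  // (Recording a witness converts the first concrete type encountered into
  //  a participant instead of reporting it, so only a second, distinct
  //  concrete subtype will surface as a witness below.)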
1641   Klass* wit = wf.find_witness_subtype(ctxk);
1642   if (wit != NULL)  return NULL;   // Too many witnesses.
1643   Klass* conck = wf.participant(0);
1644   if (conck == NULL) {
1645 #ifndef PRODUCT
1646     // Make sure the dependency mechanism will pass this discovery:
1647     if (VerifyDependencies) {
1648       // Turn off dependency tracing while actually testing deps.
1649       FlagSetting fs(TraceDependencies, false);
1650       if (!Dependencies::is_concrete_klass(ctxk)) {
1651         guarantee(NULL ==
1652                   (void *)check_abstract_with_no_concrete_subtype(ctxk),
1653                   "verify dep.");
1654       } else {
1655         guarantee(NULL ==
1656                   (void *)check_concrete_with_no_concrete_subtype(ctxk),
1657                   "verify dep.");
1658       }
1659     }
1660 #endif //PRODUCT
1661     return ctxk;                   // Return ctxk as a flag for "no subtypes".
1662   } else {
1663 #ifndef PRODUCT
1664     // Make sure the dependency mechanism will pass this discovery:
1665     if (VerifyDependencies) {
1666       // Turn off dependency tracing while actually testing deps.
1667       FlagSetting fs(TraceDependencies, false);
1668       if (!Dependencies::is_concrete_klass(ctxk)) {
1669         guarantee(NULL == (void *)
1670                   check_abstract_with_unique_concrete_subtype(ctxk, conck),
1671                   "verify dep.");
1672       }
1673     }
1674 #endif //PRODUCT
1675     return conck;
1676   }
1677 }
1678 
1679 // Test the assertion that the k[12] are the only concrete subtypes of ctxk,
1680 // except possibly for further subtypes of k[12] themselves.
1681 // The context type must be abstract.  The types k1 and k2 are themselves
1682 // allowed to have further concrete subtypes.
1683 Klass* Dependencies::check_abstract_with_exclusive_concrete_subtypes(
1684                                                 Klass* ctxk,
1685                                                 Klass* k1,
1686                                                 Klass* k2,
1687                                                 KlassDepChange* changes) {
1688   ClassHierarchyWalker wf;
1689   wf.add_participant(k1);
1690   wf.add_participant(k2);
1691   return wf.find_witness_subtype(ctxk, changes);
1692 }
1693 
1694 // Search ctxk for concrete implementations.  If there are klen or fewer,
1695 // pack them into the given array and return the number.
1696 // Otherwise, return -1, meaning the given array would overflow.
1697 // (Note that a return of 0 means there are exactly no concrete subtypes.)
1698 // In this search, if ctxk is concrete, it will be reported alone.
1699 // For any type CC reported, no proper subtypes of CC will be reported.
1700 int Dependencies::find_exclusive_concrete_subtypes(Klass* ctxk,
1701                                                    int klen,
1702                                                    Klass* karray[]) {
1703   ClassHierarchyWalker wf;
1704   wf.record_witnesses(klen);
1705   Klass* wit = wf.find_witness_subtype(ctxk);
1706   if (wit != NULL)  return -1;  // Too many witnesses.
1707   int num = wf.num_participants();
1708   assert(num <= klen, "oob");
1709   // Pack the result array with the good news.
1710   for (int i = 0; i < num; i++)
1711     karray[i] = wf.participant(i);
1712 #ifndef PRODUCT
1713   // Make sure the dependency mechanism will pass this discovery:
1714   if (VerifyDependencies) {
1715     // Turn off dependency tracing while actually testing deps.
1716     FlagSetting fs(TraceDependencies, false);
1717     switch (Dependencies::is_concrete_klass(ctxk)? -1: num) {
1718     case -1: // ctxk was itself concrete
1719       guarantee(num == 1 && karray[0] == ctxk, "verify dep.");
1720       break;
1721     case 0:
1722       guarantee(NULL == (void *)check_abstract_with_no_concrete_subtype(ctxk),
1723                 "verify dep.");
1724       break;
1725     case 1:
1726       guarantee(NULL == (void *)
1727                 check_abstract_with_unique_concrete_subtype(ctxk, karray[0]),
1728                 "verify dep.");
1729       break;
1730     case 2:
1731       guarantee(NULL == (void *)
1732                 check_abstract_with_exclusive_concrete_subtypes(ctxk,
1733                                                                 karray[0],
1734                                                                 karray[1]),
1735                 "verify dep.");
1736       break;
1737     default:
1738       ShouldNotReachHere();  // klen > 2 not yet supported
1739     }
1740   }
1741 #endif //PRODUCT
1742   return num;
1743 }
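// Illustration only: with klen == 2, an abstract ctxk whose only concrete
// subtypes are C1 and C2 yields num == 2 and karray == { C1, C2 }; a third
// concrete subtype outside the subtrees of C1 and C2 would be found as a
// witness, and -1 would be returned instead.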
1744 
1745 // If a class (or interface) has a unique concrete method uniqm, return NULL.
1746 // Otherwise, return a class that contains an interfering method.
1747 Klass* Dependencies::check_unique_concrete_method(Klass* ctxk, Method* uniqm,
1748                                                     KlassDepChange* changes) {
1749   // Here is a missing optimization:  If uniqm->is_final(),
1750   // we don't really need to search beneath it for overrides.
1751   // This is probably not important, since we don't use dependencies
1752   // to track final methods.  (They can't be "definalized".)
1753   ClassHierarchyWalker wf(uniqm->method_holder(), uniqm);
1754   return wf.find_witness_definer(ctxk, changes);
1755 }
1756 
1757 // Find the set of all non-abstract methods under ctxk that match m.
1758 // (The method m must be defined or inherited in ctxk.)
1759 // Include m itself in the set, unless it is abstract.
1760 // If this set has exactly one element, return that element.
1761 Method* Dependencies::find_unique_concrete_method(Klass* ctxk, Method* m) {
1762   // Return NULL if m is marked old; must have been a redefined method.
1763   if (m->is_old()) {
1764     return NULL;
1765   }
1766   ClassHierarchyWalker wf(m);
1767   assert(wf.check_method_context(ctxk, m), "proper context");
1768   wf.record_witnesses(1);
1769   Klass* wit = wf.find_witness_definer(ctxk);
1770   if (wit != NULL)  return NULL;  // Too many witnesses.
1771   Method* fm = wf.found_method(0);  // Will be NULL if num_parts == 0.
1772   if (Dependencies::is_concrete_method(m, ctxk)) {
1773     if (fm == NULL) {
1774       // It turns out that m was always the only implementation.
1775       fm = m;
1776     } else if (fm != m) {
1777       // Two conflicting implementations after all.
1778       // (This can happen if m is inherited into ctxk and fm overrides it.)
1779       return NULL;
1780     }
1781   }
1782 #ifndef PRODUCT
1783   // Make sure the dependency mechanism will pass this discovery:
1784   if (VerifyDependencies && fm != NULL) {
1785     guarantee(NULL == (void *)check_unique_concrete_method(ctxk, fm),
1786               "verify dep.");
1787   }
1788 #endif //PRODUCT
1789   return fm;
1790 }
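// Illustration only: if ctxk is an abstract class A declaring an abstract
// method m, and exactly one concrete subclass C overrides it, the walk
// finds only C's implementation and returns it.  If A.m were itself
// concrete and C also overrode it, there would be two conflicting
// implementations, and NULL would be returned.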
1791 
1792 Klass* Dependencies::check_exclusive_concrete_methods(Klass* ctxk,
1793                                                         Method* m1,
1794                                                         Method* m2,
1795                                                         KlassDepChange* changes) {
1796   ClassHierarchyWalker wf(m1);
1797   wf.add_participant(m1->method_holder());
1798   wf.add_participant(m2->method_holder());
1799   return wf.find_witness_definer(ctxk, changes);
1800 }
1801 
1802 Klass* Dependencies::check_has_no_finalizable_subclasses(Klass* ctxk, KlassDepChange* changes) {
1803   Klass* search_at = ctxk;
1804   if (changes != NULL)
1805     search_at = changes->new_type(); // just look at the new bit
1806   return find_finalizable_subclass(search_at);
1807 }
1808 
1809 Klass* Dependencies::check_call_site_target_value(oop call_site, oop method_handle, CallSiteDepChange* changes) {
1810   assert(call_site != NULL, "sanity");
1811   assert(method_handle != NULL, "sanity");
1812   assert(call_site->is_a(SystemDictionary::CallSite_klass()),     "sanity");
1813 
1814   if (changes == NULL) {
1815     // Validate all CallSites
1816     if (java_lang_invoke_CallSite::target(call_site) != method_handle)
1817       return call_site->klass();  // assertion failed
1818   } else {
1819     // Validate the given CallSite
1820     if (call_site == changes->call_site() && java_lang_invoke_CallSite::target(call_site) != changes->method_handle()) {
1821       assert(method_handle != changes->method_handle(), "must be");
1822       return call_site->klass();  // assertion failed
1823     }
1824   }
1825   return NULL;  // assertion still valid
1826 }
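// (A call_site_target_value dependency lets compiled code assume the
// current target of a java.lang.invoke.CallSite; when CallSite.setTarget
// installs a different MethodHandle, the check above reports a witness and
// the dependent nmethod is invalidated.)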
1827 
1828 void Dependencies::DepStream::trace_and_log_witness(Klass* witness) {
1829   if (witness != NULL) {
1830     if (TraceDependencies) {
1831       print_dependency(witness, /*verbose=*/ true);
1832     }
1833     // The following is a no-op unless logging is enabled:
1834     log_dependency(witness);
1835   }
1836 }
1837 
1838 
1839 Klass* Dependencies::DepStream::check_klass_dependency(KlassDepChange* changes) {
1840   assert_locked_or_safepoint(Compile_lock);
1841   Dependencies::check_valid_dependency_type(type());
1842 
1843   Klass* witness = NULL;
1844   switch (type()) {
1845   case evol_method:
1846     witness = check_evol_method(method_argument(0));
1847     break;
1848   case leaf_type:
1849     witness = check_leaf_type(context_type());
1850     break;
1851   case abstract_with_unique_concrete_subtype:
1852     witness = check_abstract_with_unique_concrete_subtype(context_type(), type_argument(1), changes);
1853     break;
1854   case abstract_with_no_concrete_subtype:
1855     witness = check_abstract_with_no_concrete_subtype(context_type(), changes);
1856     break;
1857   case concrete_with_no_concrete_subtype:
1858     witness = check_concrete_with_no_concrete_subtype(context_type(), changes);
1859     break;
1860   case unique_concrete_method:
1861     witness = check_unique_concrete_method(context_type(), method_argument(1), changes);
1862     break;
1863   case abstract_with_exclusive_concrete_subtypes_2:
1864     witness = check_abstract_with_exclusive_concrete_subtypes(context_type(), type_argument(1), type_argument(2), changes);
1865     break;
1866   case exclusive_concrete_methods_2:
1867     witness = check_exclusive_concrete_methods(context_type(), method_argument(1), method_argument(2), changes);
1868     break;
1869   case no_finalizable_subclasses:
1870     witness = check_has_no_finalizable_subclasses(context_type(), changes);
1871     break;
1872   default:
1873     witness = NULL;
1874     break;
1875   }
1876   trace_and_log_witness(witness);
1877   return witness;
1878 }
1879 
1880 
1881 Klass* Dependencies::DepStream::check_call_site_dependency(CallSiteDepChange* changes) {
1882   assert_locked_or_safepoint(Compile_lock);
1883   Dependencies::check_valid_dependency_type(type());
1884 
1885   Klass* witness = NULL;
1886   switch (type()) {
1887   case call_site_target_value:
1888     witness = check_call_site_target_value(argument_oop(0), argument_oop(1), changes);
1889     break;
1890   default:
1891     witness = NULL;
1892     break;
1893   }
1894   trace_and_log_witness(witness);
1895   return witness;
1896 }
1897 
1898 
1899 Klass* Dependencies::DepStream::spot_check_dependency_at(DepChange& changes) {
1900   // Handle klass dependency
1901   if (changes.is_klass_change() && changes.as_klass_change()->involves_context(context_type()))
1902     return check_klass_dependency(changes.as_klass_change());
1903 
1904   // Handle CallSite dependency
1905   if (changes.is_call_site_change())
1906     return check_call_site_dependency(changes.as_call_site_change());
1907 
1908   // irrelevant dependency; skip it
1909   return NULL;
1910 }
1911 
1912 
1913 void DepChange::print() {
1914   int nsup = 0, nint = 0;
1915   for (ContextStream str(*this); str.next(); ) {
1916     Klass* k = str.klass();
1917     switch (str.change_type()) {
1918     case Change_new_type:
1919       tty->print_cr("  dependee = %s", k->external_name());
1920       break;
1921     case Change_new_sub:
1922       if (!WizardMode) {
1923         ++nsup;
1924       } else {
1925         tty->print_cr("  context super = %s", k->external_name());
1926       }
1927       break;
1928     case Change_new_impl:
1929       if (!WizardMode) {
1930         ++nint;
1931       } else {
1932         tty->print_cr("  context interface = %s", k->external_name());
1933       }
1934       break;
1935     default:
1936       break;
1937     }
1938   }
1939   if (nsup + nint != 0) {
1940     tty->print_cr("  context supers = %d, interfaces = %d", nsup, nint);
1941   }
1942 }
1943 
1944 void DepChange::ContextStream::start() {
1945   Klass* new_type = _changes.is_klass_change() ? _changes.as_klass_change()->new_type() : (Klass*) NULL;
1946   _change_type = (new_type == NULL ? NO_CHANGE : Start_Klass);
1947   _klass = new_type;
1948   _ti_base = NULL;
1949   _ti_index = 0;
1950   _ti_limit = 0;
1951 }
1952 
1953 bool DepChange::ContextStream::next() {
1954   switch (_change_type) {
1955   case Start_Klass:             // initial state; _klass is the new type
1956     _ti_base = InstanceKlass::cast(_klass)->transitive_interfaces();
1957     _ti_index = 0;
1958     _change_type = Change_new_type;
1959     return true;
1960   case Change_new_type:
1961     // fall through:
1962     _change_type = Change_new_sub;
1963   case Change_new_sub:
1964     // 6598190: brackets workaround Sun Studio C++ compiler bug 6629277
1965     {
1966       _klass = _klass->super();
1967       if (_klass != NULL) {
1968         return true;
1969       }
1970     }
1971     // else set up _ti_limit and fall through:
1972     _ti_limit = (_ti_base == NULL) ? 0 : _ti_base->length();
1973     _change_type = Change_new_impl;
1974   case Change_new_impl:
1975     if (_ti_index < _ti_limit) {
1976       _klass = _ti_base->at(_ti_index++);
1977       return true;
1978     }
1979     // fall through:
1980     _change_type = NO_CHANGE;  // iterator is exhausted
1981   case NO_CHANGE:
1982     break;
1983   default:
1984     ShouldNotReachHere();
1985   }
1986   return false;
1987 }
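// (The stream therefore yields the new type first, then each of its
// superclasses from the bottom up, and finally its transitive interfaces:
// exactly the context types under which the new type may have appeared.)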
1988 
1989 void KlassDepChange::initialize() {
1990   // entire transaction must be under this lock:
1991   assert_lock_strong(Compile_lock);
1992 
1993   // Mark the dependee, all of its superclasses,
1994   // and all of its transitive interfaces.
1995   for (ContextStream str(*this); str.next(); ) {
1996     Klass* d = str.klass();
1997     assert(!InstanceKlass::cast(d)->is_marked_dependent(), "checking");
1998     InstanceKlass::cast(d)->set_is_marked_dependent(true);
1999   }
2000 }
2001 
2002 KlassDepChange::~KlassDepChange() {
2003   // Unmark the dependee, all of its superclasses,
2004   // and all of its transitive interfaces.
2005   for (ContextStream str(*this); str.next(); ) {
2006     Klass* d = str.klass();
2007     InstanceKlass::cast(d)->set_is_marked_dependent(false);
2008   }
2009 }
2010 
2011 bool KlassDepChange::involves_context(Klass* k) {
2012   if (k == NULL || !k->is_instance_klass()) {
2013     return false;
2014   }
2015   InstanceKlass* ik = InstanceKlass::cast(k);
2016   bool is_contained = ik->is_marked_dependent();
2017   assert(is_contained == new_type()->is_subtype_of(k),
2018          "correct marking of potential context types");
2019   return is_contained;
2020 }
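// (The marking done in KlassDepChange::initialize() turns this containment
// test into a single flag check rather than a subtype walk; the assert
// confirms the flag agrees with new_type()->is_subtype_of(k).)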
2021 
2022 #ifndef PRODUCT
2023 void Dependencies::print_statistics() {
2024   if (deps_find_witness_print != 0) {
2025     // Call one final time, to flush out the data.
2026     deps_find_witness_print = -1;
2027     count_find_witness_calls();
2028   }
2029 }
2030 #endif
2031 
2032 CallSiteDepChange::CallSiteDepChange(Handle call_site, Handle method_handle) :
2033   _call_site(call_site),
2034   _method_handle(method_handle) {
2035   assert(_call_site()->is_a(SystemDictionary::CallSite_klass()), "must be");
2036   assert(_method_handle.is_null() || _method_handle()->is_a(SystemDictionary::MethodHandle_klass()), "must be");
2037 }