1 /*
   2  * Copyright (c) 2005, 2017, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "ci/ciArrayKlass.hpp"
  27 #include "ci/ciEnv.hpp"
  28 #include "ci/ciKlass.hpp"
  29 #include "ci/ciMethod.hpp"
  30 #include "classfile/javaClasses.inline.hpp"
  31 #include "code/dependencies.hpp"
  32 #include "compiler/compileBroker.hpp"
  33 #include "compiler/compileLog.hpp"
  34 #include "compiler/compileTask.hpp"
  35 #include "memory/resourceArea.hpp"
  36 #include "oops/objArrayKlass.hpp"
  37 #include "oops/oop.inline.hpp"
  38 #include "runtime/handles.hpp"
  39 #include "runtime/handles.inline.hpp"
  40 #include "runtime/thread.inline.hpp"
  41 #include "utilities/copy.hpp"
  42 
  43 
  44 #ifdef ASSERT
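     // Debug-only helper for the "raw oops here" asserts below: dependency
     // encoding and decoding handle raw Metadata* and oop values, so a
     // JavaThread must be in the _thread_in_vm state; non-Java threads
     // (e.g. the VM thread) are always acceptable.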
  45 static bool must_be_in_vm() {
  46   Thread* thread = Thread::current();
  47   if (thread->is_Java_thread())
  48     return ((JavaThread*)thread)->thread_state() == _thread_in_vm;
  49   else
  50     return true;  // non-Java threads (e.g. the VM thread) are fine; could be tightened to thread->is_VM_thread()
  51 }
  52 #endif //ASSERT
  53 
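     // Recording-side setup for a ciEnv-based compilation: one growable
     // bucket per DepType, allocated in the ciEnv arena, plus the _dep_seen
     // filter consulted (via note_dep_seen) to skip bucket scans for
     // arguments that have never been recorded before.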
  54 void Dependencies::initialize(ciEnv* env) {
  55   Arena* arena = env->arena();
  56   _oop_recorder = env->oop_recorder();
  57   _log = env->log();
  58   _dep_seen = new(arena) GrowableArray<int>(arena, 500, 0, 0);
  59 #if INCLUDE_JVMCI
  60   _using_dep_values = false;
  61 #endif
  62   DEBUG_ONLY(_deps[end_marker] = NULL);
  63   for (int i = (int)FIRST_TYPE; i < (int)TYPE_LIMIT; i++) {
  64     _deps[i] = new(arena) GrowableArray<ciBaseObject*>(arena, 10, 0, 0);
  65   }
  66   _content_bytes = NULL;
  67   _size_in_bytes = (size_t)-1;
  68 
  69   assert(TYPE_LIMIT <= (1<<LG2_TYPE_LIMIT), "sanity");
  70 }
  71 
  72 void Dependencies::assert_evol_method(ciMethod* m) {
  73   assert_common_1(evol_method, m);
  74 }
  75 
  76 void Dependencies::assert_leaf_type(ciKlass* ctxk) {
  77   if (ctxk->is_array_klass()) {
  78     // As a special case, support this assertion on an array type,
  79     // which reduces to an assertion on its element type.
  80     // Note that this cannot be done with assertions that
  81     // relate to concreteness or abstractness.
  82     ciType* elemt = ctxk->as_array_klass()->base_element_type();
  83     if (!elemt->is_instance_klass())  return;   // Ex:  int[][]
  84     ctxk = elemt->as_instance_klass();
  85     //if (ctxk->is_final())  return;            // Ex:  String[][]
  86   }
  87   check_ctxk(ctxk);
  88   assert_common_1(leaf_type, ctxk);
  89 }
  90 
  91 void Dependencies::assert_abstract_with_unique_concrete_subtype(ciKlass* ctxk, ciKlass* conck) {
  92   check_ctxk_abstract(ctxk);
  93   assert_common_2(abstract_with_unique_concrete_subtype, ctxk, conck);
  94 }
  95 
  96 void Dependencies::assert_abstract_with_no_concrete_subtype(ciKlass* ctxk) {
  97   check_ctxk_abstract(ctxk);
  98   assert_common_1(abstract_with_no_concrete_subtype, ctxk);
  99 }
 100 
 101 void Dependencies::assert_concrete_with_no_concrete_subtype(ciKlass* ctxk) {
 102   check_ctxk_concrete(ctxk);
 103   assert_common_1(concrete_with_no_concrete_subtype, ctxk);
 104 }
 105 
 106 void Dependencies::assert_unique_concrete_method(ciKlass* ctxk, ciMethod* uniqm) {
 107   check_ctxk(ctxk);
 108   assert_common_2(unique_concrete_method, ctxk, uniqm);
 109 }
 110 
 111 void Dependencies::assert_abstract_with_exclusive_concrete_subtypes(ciKlass* ctxk, ciKlass* k1, ciKlass* k2) {
 112   check_ctxk(ctxk);
 113   assert_common_3(abstract_with_exclusive_concrete_subtypes_2, ctxk, k1, k2);
 114 }
 115 
 116 void Dependencies::assert_exclusive_concrete_methods(ciKlass* ctxk, ciMethod* m1, ciMethod* m2) {
 117   check_ctxk(ctxk);
 118   assert_common_3(exclusive_concrete_methods_2, ctxk, m1, m2);
 119 }
 120 
 121 void Dependencies::assert_has_no_finalizable_subclasses(ciKlass* ctxk) {
 122   check_ctxk(ctxk);
 123   assert_common_1(no_finalizable_subclasses, ctxk);
 124 }
 125 
 126 void Dependencies::assert_call_site_target_value(ciCallSite* call_site, ciMethodHandle* method_handle) {
 127   assert_common_2(call_site_target_value, call_site, method_handle);
 128 }
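     // Typical recording pattern from a compiler (a sketch; exact call sites
     // vary):
     //   deps->assert_unique_concrete_method(ctxk, target);
     // The assertions collected here are flattened by encode_content_bytes(),
     // copied into the nmethod by copy_to(), and re-validated later through
     // DepStream (see validate_dependencies and check_dependency).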
 129 
 130 #if INCLUDE_JVMCI
 131 
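     // When JVMCI is recording, dependencies are expressed directly on
     // VM-level Metadata*/oop values wrapped in DepValue (backed by the
     // OopRecorder) instead of ciBaseObject*.  The assert_* entry points
     // below mirror the ci-based ones above.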
 132 Dependencies::Dependencies(Arena* arena, OopRecorder* oop_recorder, CompileLog* log) {
 133   _oop_recorder = oop_recorder;
 134   _log = log;
 135   _dep_seen = new(arena) GrowableArray<int>(arena, 500, 0, 0);
 136   _using_dep_values = true;
 137   DEBUG_ONLY(_dep_values[end_marker] = NULL);
 138   for (int i = (int)FIRST_TYPE; i < (int)TYPE_LIMIT; i++) {
 139     _dep_values[i] = new(arena) GrowableArray<DepValue>(arena, 10, 0, DepValue());
 140   }
 141   _content_bytes = NULL;
 142   _size_in_bytes = (size_t)-1;
 143 
 144   assert(TYPE_LIMIT <= (1<<LG2_TYPE_LIMIT), "sanity");
 145 }
 146 
 147 void Dependencies::assert_evol_method(Method* m) {
 148   assert_common_1(evol_method, DepValue(_oop_recorder, m));
 149 }
 150 
 151 void Dependencies::assert_has_no_finalizable_subclasses(Klass* ctxk) {
 152   check_ctxk(ctxk);
 153   assert_common_1(no_finalizable_subclasses, DepValue(_oop_recorder, ctxk));
 154 }
 155 
 156 void Dependencies::assert_leaf_type(Klass* ctxk) {
 157   if (ctxk->is_array_klass()) {
 158     // As a special case, support this assertion on an array type,
 159     // which reduces to an assertion on its element type.
 160     // Note that this cannot be done with assertions that
 161     // relate to concreteness or abstractness.
 162     BasicType elemt = ArrayKlass::cast(ctxk)->element_type();
 163     if (is_java_primitive(elemt))  return;   // Ex:  int[][]
 164     ctxk = ObjArrayKlass::cast(ctxk)->bottom_klass();
 165     //if (ctxk->is_final())  return;            // Ex:  String[][]
 166   }
 167   check_ctxk(ctxk);
 168   assert_common_1(leaf_type, DepValue(_oop_recorder, ctxk));
 169 }
 170 
 171 void Dependencies::assert_abstract_with_unique_concrete_subtype(Klass* ctxk, Klass* conck) {
 172   check_ctxk_abstract(ctxk);
 173   DepValue ctxk_dv(_oop_recorder, ctxk);
 174   DepValue conck_dv(_oop_recorder, conck, &ctxk_dv);
 175   assert_common_2(abstract_with_unique_concrete_subtype, ctxk_dv, conck_dv);
 176 }
 177 
 178 void Dependencies::assert_unique_concrete_method(Klass* ctxk, Method* uniqm) {
 179   check_ctxk(ctxk);
 180   assert_common_2(unique_concrete_method, DepValue(_oop_recorder, ctxk), DepValue(_oop_recorder, uniqm));
 181 }
 182 
 183 void Dependencies::assert_call_site_target_value(oop call_site, oop method_handle) {
 184   assert_common_2(call_site_target_value, DepValue(_oop_recorder, JNIHandles::make_local(call_site)), DepValue(_oop_recorder, JNIHandles::make_local(method_handle)));
 185 }
 186 
 187 #endif // INCLUDE_JVMCI
 188 
 189 
 190 // Helper function.  If we are adding a new dep. under ctxk2, try to
 191 // find an old dep. under a broader (or equal) ctxk1.  If either context
 192 // subsumes the other, keep the broader of the two and return true.
 193 bool Dependencies::maybe_merge_ctxk(GrowableArray<ciBaseObject*>* deps,
 194                                     int ctxk_i, ciKlass* ctxk2) {
 195   ciKlass* ctxk1 = deps->at(ctxk_i)->as_metadata()->as_klass();
 196   if (ctxk2->is_subtype_of(ctxk1)) {
 197     return true;  // success, and no need to change
 198   } else if (ctxk1->is_subtype_of(ctxk2)) {
 199     // new context class fully subsumes previous one
 200     deps->at_put(ctxk_i, ctxk2);
 201     return true;
 202   } else {
 203     return false;
 204   }
 205 }
 206 
 207 void Dependencies::assert_common_1(DepType dept, ciBaseObject* x) {
 208   assert(dep_args(dept) == 1, "sanity");
 209   log_dependency(dept, x);
 210   GrowableArray<ciBaseObject*>* deps = _deps[dept];
 211 
 212   // see if the same (or a similar) dep is already recorded
 213   if (note_dep_seen(dept, x)) {
 214     assert(deps->find(x) >= 0, "sanity");
 215   } else {
 216     deps->append(x);
 217   }
 218 }
 219 
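     // Each bucket stores its assertions flattened: dep_args(dept)
     // consecutive entries per assertion.  For dependency types with an
     // explicit context, the context klass is argument 0, which lets a
     // redundant assertion be merged into an existing entry via
     // maybe_merge_ctxk instead of being appended.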
 220 void Dependencies::assert_common_2(DepType dept,
 221                                    ciBaseObject* x0, ciBaseObject* x1) {
 222   assert(dep_args(dept) == 2, "sanity");
 223   log_dependency(dept, x0, x1);
 224   GrowableArray<ciBaseObject*>* deps = _deps[dept];
 225 
 226   // see if the same (or a similar) dep is already recorded
 227   bool has_ctxk = has_explicit_context_arg(dept);
 228   if (has_ctxk) {
 229     assert(dep_context_arg(dept) == 0, "sanity");
 230     if (note_dep_seen(dept, x1)) {
 231       // look in this bucket for redundant assertions
 232       const int stride = 2;
 233       for (int i = deps->length(); (i -= stride) >= 0; ) {
 234         ciBaseObject* y1 = deps->at(i+1);
 235         if (x1 == y1) {  // same subject; check the context
 236           if (maybe_merge_ctxk(deps, i+0, x0->as_metadata()->as_klass())) {
 237             return;
 238           }
 239         }
 240       }
 241     }
 242   } else {
 243     if (note_dep_seen(dept, x0) && note_dep_seen(dept, x1)) {
 244       // look in this bucket for redundant assertions
 245       const int stride = 2;
 246       for (int i = deps->length(); (i -= stride) >= 0; ) {
 247         ciBaseObject* y0 = deps->at(i+0);
 248         ciBaseObject* y1 = deps->at(i+1);
 249         if (x0 == y0 && x1 == y1) {
 250           return;
 251         }
 252       }
 253     }
 254   }
 255 
 256   // append the assertion in the correct bucket:
 257   deps->append(x0);
 258   deps->append(x1);
 259 }
 260 
 261 void Dependencies::assert_common_3(DepType dept,
 262                                    ciKlass* ctxk, ciBaseObject* x, ciBaseObject* x2) {
 263   assert(dep_context_arg(dept) == 0, "sanity");
 264   assert(dep_args(dept) == 3, "sanity");
 265   log_dependency(dept, ctxk, x, x2);
 266   GrowableArray<ciBaseObject*>* deps = _deps[dept];
 267 
 268   // try to normalize an unordered pair:
 269   bool swap = false;
 270   switch (dept) {
 271   case abstract_with_exclusive_concrete_subtypes_2:
 272     swap = (x->ident() > x2->ident() && x->as_metadata()->as_klass() != ctxk);
 273     break;
 274   case exclusive_concrete_methods_2:
 275     swap = (x->ident() > x2->ident() && x->as_metadata()->as_method()->holder() != ctxk);
 276     break;
 277   default:
 278     break;
 279   }
 280   if (swap) { ciBaseObject* t = x; x = x2; x2 = t; }
 281 
 282   // see if the same (or a similar) dep is already recorded
 283   if (note_dep_seen(dept, x) && note_dep_seen(dept, x2)) {
 284     // look in this bucket for redundant assertions
 285     const int stride = 3;
 286     for (int i = deps->length(); (i -= stride) >= 0; ) {
 287       ciBaseObject* y  = deps->at(i+1);
 288       ciBaseObject* y2 = deps->at(i+2);
 289       if (x == y && x2 == y2) {  // same subjects; check the context
 290         if (maybe_merge_ctxk(deps, i+0, ctxk)) {
 291           return;
 292         }
 293       }
 294     }
 295   }
 296   // append the assertion in the correct bucket:
 297   deps->append(ctxk);
 298   deps->append(x);
 299   deps->append(x2);
 300 }
 301 
 302 #if INCLUDE_JVMCI
 303 bool Dependencies::maybe_merge_ctxk(GrowableArray<DepValue>* deps,
 304                                     int ctxk_i, DepValue ctxk2_dv) {
 305   Klass* ctxk1 = deps->at(ctxk_i).as_klass(_oop_recorder);
 306   Klass* ctxk2 = ctxk2_dv.as_klass(_oop_recorder);
 307   if (ctxk2->is_subtype_of(ctxk1)) {
 308     return true;  // success, and no need to change
 309   } else if (ctxk1->is_subtype_of(ctxk2)) {
 310     // new context class fully subsumes previous one
 311     deps->at_put(ctxk_i, ctxk2_dv);
 312     return true;
 313   } else {
 314     return false;
 315   }
 316 }
 317 
 318 void Dependencies::assert_common_1(DepType dept, DepValue x) {
 319   assert(dep_args(dept) == 1, "sanity");
 320   //log_dependency(dept, x);
 321   GrowableArray<DepValue>* deps = _dep_values[dept];
 322 
 323   // see if the same (or a similar) dep is already recorded
 324   if (note_dep_seen(dept, x)) {
 325     assert(deps->find(x) >= 0, "sanity");
 326   } else {
 327     deps->append(x);
 328   }
 329 }
 330 
 331 void Dependencies::assert_common_2(DepType dept,
 332                                    DepValue x0, DepValue x1) {
 333   assert(dep_args(dept) == 2, "sanity");
 334   //log_dependency(dept, x0, x1);
 335   GrowableArray<DepValue>* deps = _dep_values[dept];
 336 
 337   // see if the same (or a similar) dep is already recorded
 338   bool has_ctxk = has_explicit_context_arg(dept);
 339   if (has_ctxk) {
 340     assert(dep_context_arg(dept) == 0, "sanity");
 341     if (note_dep_seen(dept, x1)) {
 342       // look in this bucket for redundant assertions
 343       const int stride = 2;
 344       for (int i = deps->length(); (i -= stride) >= 0; ) {
 345         DepValue y1 = deps->at(i+1);
 346         if (x1 == y1) {  // same subject; check the context
 347           if (maybe_merge_ctxk(deps, i+0, x0)) {
 348             return;
 349           }
 350         }
 351       }
 352     }
 353   } else {
 354     if (note_dep_seen(dept, x0) && note_dep_seen(dept, x1)) {
 355       // look in this bucket for redundant assertions
 356       const int stride = 2;
 357       for (int i = deps->length(); (i -= stride) >= 0; ) {
 358         DepValue y0 = deps->at(i+0);
 359         DepValue y1 = deps->at(i+1);
 360         if (x0 == y0 && x1 == y1) {
 361           return;
 362         }
 363       }
 364     }
 365   }
 366 
 367   // append the assertion in the correct bucket:
 368   deps->append(x0);
 369   deps->append(x1);
 370 }
 371 #endif // INCLUDE_JVMCI
 372 
 373 /// Support for encoding dependencies into an nmethod:
 374 
 375 void Dependencies::copy_to(nmethod* nm) {
 376   address beg = nm->dependencies_begin();
 377   address end = nm->dependencies_end();
 378   guarantee(end - beg >= (ptrdiff_t) size_in_bytes(), "bad sizing");
 379   Copy::disjoint_words((HeapWord*) content_bytes(),
 380                        (HeapWord*) beg,
 381                        size_in_bytes() / sizeof(HeapWord));
 382   assert(size_in_bytes() % sizeof(HeapWord) == 0, "copy by words");
 383 }
 384 
 385 static int sort_dep(ciBaseObject** p1, ciBaseObject** p2, int narg) {
 386   for (int i = 0; i < narg; i++) {
 387     int diff = p1[i]->ident() - p2[i]->ident();
 388     if (diff != 0)  return diff;
 389   }
 390   return 0;
 391 }
 392 static int sort_dep_arg_1(ciBaseObject** p1, ciBaseObject** p2)
 393 { return sort_dep(p1, p2, 1); }
 394 static int sort_dep_arg_2(ciBaseObject** p1, ciBaseObject** p2)
 395 { return sort_dep(p1, p2, 2); }
 396 static int sort_dep_arg_3(ciBaseObject** p1, ciBaseObject** p2)
 397 { return sort_dep(p1, p2, 3); }
 398 
 399 #if INCLUDE_JVMCI
 400 // metadata deps are sorted before object deps
 401 static int sort_dep_value(Dependencies::DepValue* p1, Dependencies::DepValue* p2, int narg) {
 402   for (int i = 0; i < narg; i++) {
 403     int diff = p1[i].sort_key() - p2[i].sort_key();
 404     if (diff != 0)  return diff;
 405   }
 406   return 0;
 407 }
 408 static int sort_dep_value_arg_1(Dependencies::DepValue* p1, Dependencies::DepValue* p2)
 409 { return sort_dep_value(p1, p2, 1); }
 410 static int sort_dep_value_arg_2(Dependencies::DepValue* p1, Dependencies::DepValue* p2)
 411 { return sort_dep_value(p1, p2, 2); }
 412 static int sort_dep_value_arg_3(Dependencies::DepValue* p1, Dependencies::DepValue* p2)
 413 { return sort_dep_value(p1, p2, 3); }
 414 #endif // INCLUDE_JVMCI
 415 
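     // Sort each bucket by argument identity (ident() / sort_key()) so that
     // equal assertions end up adjacent and the encoded stream has a stable
     // order.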
 416 void Dependencies::sort_all_deps() {
 417 #if INCLUDE_JVMCI
 418   if (_using_dep_values) {
 419     for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
 420       DepType dept = (DepType)deptv;
 421       GrowableArray<DepValue>* deps = _dep_values[dept];
 422       if (deps->length() <= 1)  continue;
 423       switch (dep_args(dept)) {
 424       case 1: deps->sort(sort_dep_value_arg_1, 1); break;
 425       case 2: deps->sort(sort_dep_value_arg_2, 2); break;
 426       case 3: deps->sort(sort_dep_value_arg_3, 3); break;
 427       default: ShouldNotReachHere(); break;
 428       }
 429     }
 430     return;
 431   }
 432 #endif // INCLUDE_JVMCI
 433   for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
 434     DepType dept = (DepType)deptv;
 435     GrowableArray<ciBaseObject*>* deps = _deps[dept];
 436     if (deps->length() <= 1)  continue;
 437     switch (dep_args(dept)) {
 438     case 1: deps->sort(sort_dep_arg_1, 1); break;
 439     case 2: deps->sort(sort_dep_arg_2, 2); break;
 440     case 3: deps->sort(sort_dep_arg_3, 3); break;
 441     default: ShouldNotReachHere(); break;
 442     }
 443   }
 444 }
 445 
 446 size_t Dependencies::estimate_size_in_bytes() {
 447   size_t est_size = 100;
 448 #if INCLUDE_JVMCI
 449   if (_using_dep_values) {
 450     for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
 451       DepType dept = (DepType)deptv;
 452       GrowableArray<DepValue>* deps = _dep_values[dept];
 453       est_size += deps->length() * 2;  // tags and argument(s)
 454     }
 455     return est_size;
 456   }
 457 #endif // INCLUDE_JVMCI
 458   for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
 459     DepType dept = (DepType)deptv;
 460     GrowableArray<ciBaseObject*>* deps = _deps[dept];
 461     est_size += deps->length()*2;  // tags and argument(s)
 462   }
 463   return est_size;
 464 }
 465 
 466 ciKlass* Dependencies::ctxk_encoded_as_null(DepType dept, ciBaseObject* x) {
 467   switch (dept) {
 468   case abstract_with_exclusive_concrete_subtypes_2:
 469     return x->as_metadata()->as_klass();
 470   case unique_concrete_method:
 471   case exclusive_concrete_methods_2:
 472     return x->as_metadata()->as_method()->holder();
 473   default:
 474     return NULL;  // let NULL be NULL
 475   }
 476 }
 477 
 478 Klass* Dependencies::ctxk_encoded_as_null(DepType dept, Metadata* x) {
 479   assert(must_be_in_vm(), "raw oops here");
 480   switch (dept) {
 481   case abstract_with_exclusive_concrete_subtypes_2:
 482     assert(x->is_klass(), "sanity");
 483     return (Klass*) x;
 484   case unique_concrete_method:
 485   case exclusive_concrete_methods_2:
 486     assert(x->is_method(), "sanity");
 487     return ((Method*)x)->method_holder();
 488   default:
 489     return NULL;  // let NULL be NULL
 490   }
 491 }
 492 
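     // Wire format produced below: for every recorded assertion, one code
     // byte (the DepType, OR'ed with default_context_type_bit when the
     // context klass can be recomputed from the following argument, see
     // ctxk_encoded_as_null), followed by one compressed int per remaining
     // argument, each an OopRecorder index.  The stream is terminated by
     // end_marker bytes padded out to a HeapWord boundary.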
 493 void Dependencies::encode_content_bytes() {
 494   sort_all_deps();
 495 
 496   // cast is safe, no deps can overflow INT_MAX
 497   CompressedWriteStream bytes((int)estimate_size_in_bytes());
 498 
 499 #if INCLUDE_JVMCI
 500   if (_using_dep_values) {
 501     for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
 502       DepType dept = (DepType)deptv;
 503       GrowableArray<DepValue>* deps = _dep_values[dept];
 504       if (deps->length() == 0)  continue;
 505       int stride = dep_args(dept);
 506       int ctxkj  = dep_context_arg(dept);  // -1 if no context arg
 507       assert(stride > 0, "sanity");
 508       for (int i = 0; i < deps->length(); i += stride) {
 509         jbyte code_byte = (jbyte)dept;
 510         int skipj = -1;
 511         if (ctxkj >= 0 && ctxkj+1 < stride) {
 512           Klass*  ctxk = deps->at(i+ctxkj+0).as_klass(_oop_recorder);
 513           DepValue x = deps->at(i+ctxkj+1);  // following argument
 514           if (ctxk == ctxk_encoded_as_null(dept, x.as_metadata(_oop_recorder))) {
 515             skipj = ctxkj;  // we win:  maybe one less oop to keep track of
 516             code_byte |= default_context_type_bit;
 517           }
 518         }
 519         bytes.write_byte(code_byte);
 520         for (int j = 0; j < stride; j++) {
 521           if (j == skipj)  continue;
 522           DepValue v = deps->at(i+j);
 523           int idx = v.index();
 524           bytes.write_int(idx);
 525         }
 526       }
 527     }
 528   } else {
 529 #endif // INCLUDE_JVMCI
 530   for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
 531     DepType dept = (DepType)deptv;
 532     GrowableArray<ciBaseObject*>* deps = _deps[dept];
 533     if (deps->length() == 0)  continue;
 534     int stride = dep_args(dept);
 535     int ctxkj  = dep_context_arg(dept);  // -1 if no context arg
 536     assert(stride > 0, "sanity");
 537     for (int i = 0; i < deps->length(); i += stride) {
 538       jbyte code_byte = (jbyte)dept;
 539       int skipj = -1;
 540       if (ctxkj >= 0 && ctxkj+1 < stride) {
 541         ciKlass*  ctxk = deps->at(i+ctxkj+0)->as_metadata()->as_klass();
 542         ciBaseObject* x     = deps->at(i+ctxkj+1);  // following argument
 543         if (ctxk == ctxk_encoded_as_null(dept, x)) {
 544           skipj = ctxkj;  // we win:  maybe one less oop to keep track of
 545           code_byte |= default_context_type_bit;
 546         }
 547       }
 548       bytes.write_byte(code_byte);
 549       for (int j = 0; j < stride; j++) {
 550         if (j == skipj)  continue;
 551         ciBaseObject* v = deps->at(i+j);
 552         int idx;
 553         if (v->is_object()) {
 554           idx = _oop_recorder->find_index(v->as_object()->constant_encoding());
 555         } else {
 556           ciMetadata* meta = v->as_metadata();
 557           idx = _oop_recorder->find_index(meta->constant_encoding());
 558         }
 559         bytes.write_int(idx);
 560       }
 561     }
 562   }
 563 #if INCLUDE_JVMCI
 564   }
 565 #endif
 566 
 567   // write a sentinel byte to mark the end
 568   bytes.write_byte(end_marker);
 569 
 570   // round it out to a word boundary
 571   while (bytes.position() % sizeof(HeapWord) != 0) {
 572     bytes.write_byte(end_marker);
 573   }
 574 
 575   // check whether the dept byte encoding really works
 576   assert((jbyte)default_context_type_bit != 0, "byte overflow");
 577 
 578   _content_bytes = bytes.buffer();
 579   _size_in_bytes = bytes.position();
 580 }
 581 
 582 
 583 const char* Dependencies::_dep_name[TYPE_LIMIT] = {
 584   "end_marker",
 585   "evol_method",
 586   "leaf_type",
 587   "abstract_with_unique_concrete_subtype",
 588   "abstract_with_no_concrete_subtype",
 589   "concrete_with_no_concrete_subtype",
 590   "unique_concrete_method",
 591   "abstract_with_exclusive_concrete_subtypes_2",
 592   "exclusive_concrete_methods_2",
 593   "no_finalizable_subclasses",
 594   "call_site_target_value"
 595 };
 596 
 597 int Dependencies::_dep_args[TYPE_LIMIT] = {
 598   -1,// end_marker
 599   1, // evol_method m
 600   1, // leaf_type ctxk
 601   2, // abstract_with_unique_concrete_subtype ctxk, k
 602   1, // abstract_with_no_concrete_subtype ctxk
 603   1, // concrete_with_no_concrete_subtype ctxk
 604   2, // unique_concrete_method ctxk, m
 605   3, // abstract_with_exclusive_concrete_subtypes_2 ctxk, k1, k2
 606   3, // exclusive_concrete_methods_2 ctxk, m1, m2
 607   1, // no_finalizable_subclasses ctxk
 608   2  // call_site_target_value call_site, method_handle
 609 };
 610 
 611 const char* Dependencies::dep_name(Dependencies::DepType dept) {
 612   if (!dept_in_mask(dept, all_types))  return "?bad-dep?";
 613   return _dep_name[dept];
 614 }
 615 
 616 int Dependencies::dep_args(Dependencies::DepType dept) {
 617   if (!dept_in_mask(dept, all_types))  return -1;
 618   return _dep_args[dept];
 619 }
 620 
 621 void Dependencies::check_valid_dependency_type(DepType dept) {
 622   guarantee(FIRST_TYPE <= dept && dept < TYPE_LIMIT, "invalid dependency type: %d", (int) dept);
 623 }
 624 
 625 Dependencies::DepType Dependencies::validate_dependencies(CompileTask* task, bool counter_changed, char** failure_detail) {
 626   // First, check non-klass dependencies as we might return early and
 627   // not check klass dependencies if the system dictionary
 628   // modification counter hasn't changed (see below).
 629   for (Dependencies::DepStream deps(this); deps.next(); ) {
 630     if (deps.is_klass_type())  continue;  // skip klass dependencies
 631     Klass* witness = deps.check_dependency();
 632     if (witness != NULL) {
 633       return deps.type();
 634     }
 635   }
 636 
 637   // Klass dependencies must be checked when the system dictionary
 638   // changes.  If logging is enabled all violated dependences will be
 639   // recorded in the log.  In debug mode check dependencies even if
 640   // the system dictionary hasn't changed to verify that no invalid
 641   // dependencies were inserted.  Any violated dependences in this
 642   // case are dumped to the tty.
 643   if (!counter_changed && !trueInDebug) {
 644     return end_marker;
 645   }
 646 
 647   int klass_violations = 0;
 648   DepType result = end_marker;
 649   for (Dependencies::DepStream deps(this); deps.next(); ) {
 650     if (!deps.is_klass_type())  continue;  // skip non-klass dependencies
 651     Klass* witness = deps.check_dependency();
 652     if (witness != NULL) {
 653       if (klass_violations == 0) {
 654         result = deps.type();
 655         if (failure_detail != NULL && klass_violations == 0) {
 656           // Use a fixed size buffer to prevent the string stream from
 657           // resizing in the context of an inner resource mark.
 658           char* buffer = NEW_RESOURCE_ARRAY(char, O_BUFLEN);
 659           stringStream st(buffer, O_BUFLEN);
 660           deps.print_dependency(witness, true, &st);
 661           *failure_detail = st.as_string();
 662         }
 663       }
 664       klass_violations++;
 665       if (!counter_changed) {
 666         // Dependence failed but counter didn't change.  Log a message
 667         // describing what failed and allow the assert at the end to
 668         // trigger.
 669         deps.print_dependency(witness);
 670       } else if (xtty == NULL) {
 671         // If we're not logging then a single violation is sufficient,
 672         // otherwise we want to log all the dependences which were
 673         // violated.
 674         break;
 675       }
 676     }
 677   }
 678 
 679   if (klass_violations != 0) {
 680 #ifdef ASSERT
 681     if (task != NULL && !counter_changed && !PrintCompilation) {
 682       // Print out the compile task that failed
 683       task->print_tty();
 684     }
 685 #endif
 686     assert(counter_changed, "failed dependencies, but counter didn't change");
 687   }
 688   return result;
 689 }
 690 
 691 // for the sake of the compiler log, print out current dependencies:
 692 void Dependencies::log_all_dependencies() {
 693   if (log() == NULL)  return;
 694   ResourceMark rm;
 695   for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
 696     DepType dept = (DepType)deptv;
 697     GrowableArray<ciBaseObject*>* deps = _deps[dept];
 698     int deplen = deps->length();
 699     if (deplen == 0) {
 700       continue;
 701     }
 702     int stride = dep_args(dept);
 703     GrowableArray<ciBaseObject*>* ciargs = new GrowableArray<ciBaseObject*>(stride);
 704     for (int i = 0; i < deps->length(); i += stride) {
 705       for (int j = 0; j < stride; j++) {
 706         // flush out the identities before printing
 707         ciargs->push(deps->at(i+j));
 708       }
 709       write_dependency_to(log(), dept, ciargs);
 710       ciargs->clear();
 711     }
 712     guarantee(deplen == deps->length(), "deps array cannot grow inside nested ResourceMark scope");
 713   }
 714 }
 715 
 716 void Dependencies::write_dependency_to(CompileLog* log,
 717                                        DepType dept,
 718                                        GrowableArray<DepArgument>* args,
 719                                        Klass* witness) {
 720   if (log == NULL) {
 721     return;
 722   }
 723   ResourceMark rm;
 724   ciEnv* env = ciEnv::current();
 725   GrowableArray<ciBaseObject*>* ciargs = new GrowableArray<ciBaseObject*>(args->length());
 726   for (GrowableArrayIterator<DepArgument> it = args->begin(); it != args->end(); ++it) {
 727     DepArgument arg = *it;
 728     if (arg.is_oop()) {
 729       ciargs->push(env->get_object(arg.oop_value()));
 730     } else {
 731       ciargs->push(env->get_metadata(arg.metadata_value()));
 732     }
 733   }
 734   int argslen = ciargs->length();
 735   Dependencies::write_dependency_to(log, dept, ciargs, witness);
 736   guarantee(argslen == ciargs->length(), "ciargs array cannot grow inside nested ResourceMark scope");
 737 }
 738 
 739 void Dependencies::write_dependency_to(CompileLog* log,
 740                                        DepType dept,
 741                                        GrowableArray<ciBaseObject*>* args,
 742                                        Klass* witness) {
 743   if (log == NULL) {
 744     return;
 745   }
 746   ResourceMark rm;
 747   GrowableArray<int>* argids = new GrowableArray<int>(args->length());
 748   for (GrowableArrayIterator<ciBaseObject*> it = args->begin(); it != args->end(); ++it) {
 749     ciBaseObject* obj = *it;
 750     if (obj->is_object()) {
 751       argids->push(log->identify(obj->as_object()));
 752     } else {
 753       argids->push(log->identify(obj->as_metadata()));
 754     }
 755   }
 756   if (witness != NULL) {
 757     log->begin_elem("dependency_failed");
 758   } else {
 759     log->begin_elem("dependency");
 760   }
 761   log->print(" type='%s'", dep_name(dept));
 762   const int ctxkj = dep_context_arg(dept);  // -1 if no context arg
 763   if (ctxkj >= 0 && ctxkj < argids->length()) {
 764     log->print(" ctxk='%d'", argids->at(ctxkj));
 765   }
 766   // write remaining arguments, if any.
 767   for (int j = 0; j < argids->length(); j++) {
 768     if (j == ctxkj)  continue;  // already logged
 769     if (j == 1) {
 770       log->print(  " x='%d'",    argids->at(j));
 771     } else {
 772       log->print(" x%d='%d'", j, argids->at(j));
 773     }
 774   }
 775   if (witness != NULL) {
 776     log->object("witness", witness);
 777     log->stamp();
 778   }
 779   log->end_elem();
 780 }
 781 
 782 void Dependencies::write_dependency_to(xmlStream* xtty,
 783                                        DepType dept,
 784                                        GrowableArray<DepArgument>* args,
 785                                        Klass* witness) {
 786   if (xtty == NULL) {
 787     return;
 788   }
 789   Thread* thread = Thread::current();
 790   HandleMark rm(thread);
 791   ttyLocker ttyl;
 792   int ctxkj = dep_context_arg(dept);  // -1 if no context arg
 793   if (witness != NULL) {
 794     xtty->begin_elem("dependency_failed");
 795   } else {
 796     xtty->begin_elem("dependency");
 797   }
 798   xtty->print(" type='%s'", dep_name(dept));
 799   if (ctxkj >= 0) {
 800     xtty->object("ctxk", args->at(ctxkj).metadata_value());
 801   }
 802   // write remaining arguments, if any.
 803   for (int j = 0; j < args->length(); j++) {
 804     if (j == ctxkj)  continue;  // already logged
 805     DepArgument arg = args->at(j);
 806     if (j == 1) {
 807       if (arg.is_oop()) {
 808         xtty->object("x", Handle(thread, arg.oop_value()));
 809       } else {
 810         xtty->object("x", arg.metadata_value());
 811       }
 812     } else {
 813       char xn[12]; sprintf(xn, "x%d", j);
 814       if (arg.is_oop()) {
 815         xtty->object(xn, Handle(thread, arg.oop_value()));
 816       } else {
 817         xtty->object(xn, arg.metadata_value());
 818       }
 819     }
 820   }
 821   if (witness != NULL) {
 822     xtty->object("witness", witness);
 823     xtty->stamp();
 824   }
 825   xtty->end_elem();
 826 }
 827 
 828 void Dependencies::print_dependency(DepType dept, GrowableArray<DepArgument>* args,
 829                                     Klass* witness, outputStream* st) {
 830   ResourceMark rm;
 831   ttyLocker ttyl;   // keep the following output all in one block
 832   st->print_cr("%s of type %s",
 833                 (witness == NULL)? "Dependency": "Failed dependency",
 834                 dep_name(dept));
 835   // print arguments
 836   int ctxkj = dep_context_arg(dept);  // -1 if no context arg
 837   for (int j = 0; j < args->length(); j++) {
 838     DepArgument arg = args->at(j);
 839     bool put_star = false;
 840     if (arg.is_null())  continue;
 841     const char* what;
 842     if (j == ctxkj) {
 843       assert(arg.is_metadata(), "must be");
 844       what = "context";
 845       put_star = !Dependencies::is_concrete_klass((Klass*)arg.metadata_value());
 846     } else if (arg.is_method()) {
 847       what = "method ";
 848       put_star = !Dependencies::is_concrete_method((Method*)arg.metadata_value(), NULL);
 849     } else if (arg.is_klass()) {
 850       what = "class  ";
 851     } else {
 852       what = "object ";
 853     }
 854     st->print("  %s = %s", what, (put_star? "*": ""));
 855     if (arg.is_klass()) {
 856       st->print("%s", ((Klass*)arg.metadata_value())->external_name());
 857     } else if (arg.is_method()) {
 858       ((Method*)arg.metadata_value())->print_value_on(st);
 859     } else if (arg.is_oop()) {
 860       arg.oop_value()->print_value_on(st);
 861     } else {
 862       ShouldNotReachHere(); // Provide impl for this type.
 863     }
 864 
 865     st->cr();
 866   }
 867   if (witness != NULL) {
 868     bool put_star = !Dependencies::is_concrete_klass(witness);
 869     st->print_cr("  witness = %s%s",
 870                   (put_star? "*": ""),
 871                   witness->external_name());
 872   }
 873 }
 874 
 875 void Dependencies::DepStream::log_dependency(Klass* witness) {
 876   if (_deps == NULL && xtty == NULL)  return;  // fast cutout for runtime
 877   ResourceMark rm;
 878   const int nargs = argument_count();
 879   GrowableArray<DepArgument>* args = new GrowableArray<DepArgument>(nargs);
 880   for (int j = 0; j < nargs; j++) {
 881     if (is_oop_argument(j)) {
 882       args->push(argument_oop(j));
 883     } else {
 884       args->push(argument(j));
 885     }
 886   }
 887   int argslen = args->length();
 888   if (_deps != NULL && _deps->log() != NULL) {
 889     if (ciEnv::current() != NULL) {
 890       Dependencies::write_dependency_to(_deps->log(), type(), args, witness);
 891     } else {
 892       // Treat the CompileLog as an xmlstream instead
 893       Dependencies::write_dependency_to((xmlStream*)_deps->log(), type(), args, witness);
 894     }
 895   } else {
 896     Dependencies::write_dependency_to(xtty, type(), args, witness);
 897   }
 898   guarantee(argslen == args->length(), "args array cannot grow inside nested ResourceMark scope");
 899 }
 900 
 901 void Dependencies::DepStream::print_dependency(Klass* witness, bool verbose, outputStream* st) {
 902   ResourceMark rm;
 903   int nargs = argument_count();
 904   GrowableArray<DepArgument>* args = new GrowableArray<DepArgument>(nargs);
 905   for (int j = 0; j < nargs; j++) {
 906     if (is_oop_argument(j)) {
 907       args->push(argument_oop(j));
 908     } else {
 909       args->push(argument(j));
 910     }
 911   }
 912   int argslen = args->length();
 913   Dependencies::print_dependency(type(), args, witness, st);
 914   if (verbose) {
 915     if (_code != NULL) {
 916       st->print("  code: ");
 917       _code->print_value_on(st);
 918       st->cr();
 919     }
 920   }
 921   guarantee(argslen == args->length(), "args array cannot grow inside nested ResourceMark scope");
 922 }
 923 
 924 
 925 /// Dependency stream support (decodes dependencies from an nmethod):
 926 
 927 #ifdef ASSERT
 928 void Dependencies::DepStream::initial_asserts(size_t byte_limit) {
 929   assert(must_be_in_vm(), "raw oops here");
 930   _byte_limit = byte_limit;
 931   _type       = (DepType)(end_marker-1);  // defeat "already at end" assert
 932   assert((_code!=NULL) + (_deps!=NULL) == 1, "one or t'other");
 933 }
 934 #endif //ASSERT
 935 
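     // Advance to the next encoded assertion.  Reads one code byte;
     // end_marker (or an nmethod with no dependencies at all) ends the
     // stream.  When default_context_type_bit is set, the context argument
     // was omitted by the encoder and is reconstructed lazily in
     // argument()/context_type().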
 936 bool Dependencies::DepStream::next() {
 937   assert(_type != end_marker, "already at end");
 938   if (_bytes.position() == 0 && _code != NULL
 939       && _code->dependencies_size() == 0) {
 940     // Method has no dependencies at all.
 941     return false;
 942   }
 943   int code_byte = (_bytes.read_byte() & 0xFF);
 944   if (code_byte == end_marker) {
 945     DEBUG_ONLY(_type = end_marker);
 946     return false;
 947   } else {
 948     int ctxk_bit = (code_byte & Dependencies::default_context_type_bit);
 949     code_byte -= ctxk_bit;
 950     DepType dept = (DepType)code_byte;
 951     _type = dept;
 952     Dependencies::check_valid_dependency_type(dept);
 953     int stride = _dep_args[dept];
 954     assert(stride == dep_args(dept), "sanity");
 955     int skipj = -1;
 956     if (ctxk_bit != 0) {
 957       skipj = 0;  // currently the only context argument is at zero
 958       assert(skipj == dep_context_arg(dept), "zero arg always ctxk");
 959     }
 960     for (int j = 0; j < stride; j++) {
 961       _xi[j] = (j == skipj)? 0: _bytes.read_int();
 962     }
 963     DEBUG_ONLY(_xi[stride] = -1);   // help detect overruns
 964     return true;
 965   }
 966 }
 967 
 968 inline Metadata* Dependencies::DepStream::recorded_metadata_at(int i) {
 969   Metadata* o = NULL;
 970   if (_code != NULL) {
 971     o = _code->metadata_at(i);
 972   } else {
 973     o = _deps->oop_recorder()->metadata_at(i);
 974   }
 975   return o;
 976 }
 977 
 978 inline oop Dependencies::DepStream::recorded_oop_at(int i) {
 979   return (_code != NULL)
 980          ? _code->oop_at(i)
 981     : JNIHandles::resolve(_deps->oop_recorder()->oop_at(i));
 982 }
 983 
 984 Metadata* Dependencies::DepStream::argument(int i) {
 985   Metadata* result = recorded_metadata_at(argument_index(i));
 986 
 987   if (result == NULL) { // Explicit context argument can be compressed
 988     int ctxkj = dep_context_arg(type());  // -1 if no explicit context arg
 989     if (ctxkj >= 0 && i == ctxkj && ctxkj+1 < argument_count()) {
 990       result = ctxk_encoded_as_null(type(), argument(ctxkj+1));
 991     }
 992   }
 993 
 994   assert(result == NULL || result->is_klass() || result->is_method(), "must be");
 995   return result;
 996 }
 997 
 998 /**
 999  * Returns a unique identifier for each dependency argument.
1000  */
1001 uintptr_t Dependencies::DepStream::get_identifier(int i) {
1002   if (is_oop_argument(i)) {
1003     return (uintptr_t)(oopDesc*)argument_oop(i);
1004   } else {
1005     return (uintptr_t)argument(i);
1006   }
1007 }
1008 
1009 oop Dependencies::DepStream::argument_oop(int i) {
1010   oop result = recorded_oop_at(argument_index(i));
1011   assert(oopDesc::is_oop_or_null(result), "must be");
1012   return result;
1013 }
1014 
1015 Klass* Dependencies::DepStream::context_type() {
1016   assert(must_be_in_vm(), "raw oops here");
1017 
1018   // Most dependencies have an explicit context type argument.
1019   {
1020     int ctxkj = dep_context_arg(type());  // -1 if no explicit context arg
1021     if (ctxkj >= 0) {
1022       Metadata* k = argument(ctxkj);
1023       assert(k != NULL && k->is_klass(), "type check");
1024       return (Klass*)k;
1025     }
1026   }
1027 
1028   // Some dependencies are using the klass of the first object
1029   // argument as implicit context type.
1030   {
1031     int ctxkj = dep_implicit_context_arg(type());
1032     if (ctxkj >= 0) {
1033       Klass* k = argument_oop(ctxkj)->klass();
1034       assert(k != NULL && k->is_klass(), "type check");
1035       return (Klass*) k;
1036     }
1037   }
1038 
1039   // And some dependencies don't have a context type at all,
1040   // e.g. evol_method.
1041   return NULL;
1042 }
1043 
1044 // ----------------- DependencySignature --------------------------------------
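     // Structural equality on a decoded dependency (type plus raw argument
     // identifiers), intended to let callers coalesce duplicate checks when
     // many nmethods share the same dependency.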
1045 bool DependencySignature::equals(DependencySignature const& s1, DependencySignature const& s2) {
1046   if ((s1.type() != s2.type()) || (s1.args_count() != s2.args_count())) {
1047     return false;
1048   }
1049 
1050   for (int i = 0; i < s1.args_count(); i++) {
1051     if (s1.arg(i) != s2.arg(i)) {
1052       return false;
1053     }
1054   }
1055   return true;
1056 }
1057 
1058 /// Checking dependencies:
1059 
1060 // This hierarchy walker inspects subtypes of a given type,
1061 // trying to find a "bad" class which breaks a dependency.
1062 // Such a class is called a "witness" to the broken dependency.
1063 // While searching around, we ignore "participants", which
1064 // are already known to the dependency.
1065 class ClassHierarchyWalker {
1066  public:
1067   enum { PARTICIPANT_LIMIT = 3 };
1068 
1069  private:
1070   // optional method descriptor to check for:
1071   Symbol* _name;
1072   Symbol* _signature;
1073 
1074   // special classes which are not allowed to be witnesses:
1075   Klass*    _participants[PARTICIPANT_LIMIT+1];
1076   int       _num_participants;
1077 
1078   // cache of method lookups
1079   Method* _found_methods[PARTICIPANT_LIMIT+1];
1080 
1081   // if non-zero, tells how many witnesses to convert to participants
1082   int       _record_witnesses;
1083 
1084   void initialize(Klass* participant) {
1085     _record_witnesses = 0;
1086     _participants[0]  = participant;
1087     _found_methods[0] = NULL;
1088     _num_participants = 0;
1089     if (participant != NULL) {
1090       // Terminating NULL.
1091       _participants[1] = NULL;
1092       _found_methods[1] = NULL;
1093       _num_participants = 1;
1094     }
1095   }
1096 
1097   void initialize_from_method(Method* m) {
1098     assert(m != NULL && m->is_method(), "sanity");
1099     _name      = m->name();
1100     _signature = m->signature();
1101   }
1102 
1103  public:
1104   // The walker is initialized to recognize certain methods and/or types
1105   // as friendly participants.
1106   ClassHierarchyWalker(Klass* participant, Method* m) {
1107     initialize_from_method(m);
1108     initialize(participant);
1109   }
1110   ClassHierarchyWalker(Method* m) {
1111     initialize_from_method(m);
1112     initialize(NULL);
1113   }
1114   ClassHierarchyWalker(Klass* participant = NULL) {
1115     _name      = NULL;
1116     _signature = NULL;
1117     initialize(participant);
1118   }
1119 
1120   // This is common code for two searches:  One for concrete subtypes,
1121   // the other for concrete method implementations and overrides.
1122   bool doing_subtype_search() {
1123     return _name == NULL;
1124   }
1125 
1126   int num_participants() { return _num_participants; }
1127   Klass* participant(int n) {
1128     assert((uint)n <= (uint)_num_participants, "oob");
1129     return _participants[n];
1130   }
1131 
1132   // Note:  If n==num_participants, returns NULL.
1133   Method* found_method(int n) {
1134     assert((uint)n <= (uint)_num_participants, "oob");
1135     Method* fm = _found_methods[n];
1136     assert(n == _num_participants || fm != NULL, "proper usage");
1137     if (fm != NULL && fm->method_holder() != _participants[n]) {
1138       // Default methods from interfaces can be added to classes. In
1139       // that case the holder of the method is not the class but the
1140       // interface where it's defined.
1141       assert(fm->is_default_method(), "sanity");
1142       return NULL;
1143     }
1144     return fm;
1145   }
1146 
1147 #ifdef ASSERT
1148   // Assert that m is inherited into ctxk, without intervening overrides.
1149   // (May return true even if this is not true, in corner cases where we punt.)
1150   bool check_method_context(Klass* ctxk, Method* m) {
1151     if (m->method_holder() == ctxk)
1152       return true;  // Quick win.
1153     if (m->is_private())
1154       return false; // Quick lose.  Should not happen.
1155     if (!(m->is_public() || m->is_protected()))
1156       // The override story is complex when packages get involved.
1157       return true;  // Must punt the assertion to true.
1158     Method* lm = ctxk->lookup_method(m->name(), m->signature());
1159     if (lm == NULL && ctxk->is_instance_klass()) {
1160       // It might be an interface method
1161       lm = InstanceKlass::cast(ctxk)->lookup_method_in_ordered_interfaces(m->name(),
1162                                                                           m->signature());
1163     }
1164     if (lm == m)
1165       // Method m is inherited into ctxk.
1166       return true;
1167     if (lm != NULL) {
1168       if (!(lm->is_public() || lm->is_protected())) {
1169         // Method is [package-]private, so the override story is complex.
1170         return true;  // Must punt the assertion to true.
1171       }
1172       if (lm->is_static()) {
1173         // Static methods don't override non-static so punt
1174         return true;
1175       }
1176       if (!Dependencies::is_concrete_method(lm, ctxk) &&
1177           !Dependencies::is_concrete_method(m, ctxk)) {
1178         // They are both non-concrete
1179         if (lm->method_holder()->is_subtype_of(m->method_holder())) {
1180           // Method m is overridden by lm, but both are non-concrete.
1181           return true;
1182         }
1183         if (lm->method_holder()->is_interface() && m->method_holder()->is_interface() &&
1184             ctxk->is_subtype_of(m->method_holder()) && ctxk->is_subtype_of(lm->method_holder())) {
1185           // Interface method defined in multiple super interfaces
1186           return true;
1187         }
1188       }
1189     }
1190     ResourceMark rm;
1191     tty->print_cr("Dependency method not found in the associated context:");
1192     tty->print_cr("  context = %s", ctxk->external_name());
1193     tty->print(   "  method = "); m->print_short_name(tty); tty->cr();
1194     if (lm != NULL) {
1195       tty->print( "  found = "); lm->print_short_name(tty); tty->cr();
1196     }
1197     return false;
1198   }
1199 #endif
1200 
1201   void add_participant(Klass* participant) {
1202     assert(_num_participants + _record_witnesses < PARTICIPANT_LIMIT, "oob");
1203     int np = _num_participants++;
1204     _participants[np] = participant;
1205     _participants[np+1] = NULL;
1206     _found_methods[np+1] = NULL;
1207   }
1208 
1209   void record_witnesses(int add) {
1210     if (add > PARTICIPANT_LIMIT)  add = PARTICIPANT_LIMIT;
1211     assert(_num_participants + add < PARTICIPANT_LIMIT, "oob");
1212     _record_witnesses = add;
1213   }
1214 
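       // A "witness" is a class that would break the dependency being
       // checked: for subtype searches, any concrete class; for method
       // searches, a class that supplies a concrete implementation (possibly
       // an inherited interface default) of _name/_signature.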
1215   bool is_witness(Klass* k) {
1216     if (doing_subtype_search()) {
1217       return Dependencies::is_concrete_klass(k);
1218     } else if (!k->is_instance_klass()) {
1219       return false; // no methods to find in an array type
1220     } else {
1221       // Search class hierarchy first.
1222       Method* m = InstanceKlass::cast(k)->find_instance_method(_name, _signature);
1223       if (!Dependencies::is_concrete_method(m, k)) {
1224         // Check interface defaults also, if any exist.
1225         Array<Method*>* default_methods = InstanceKlass::cast(k)->default_methods();
1226         if (default_methods == NULL)
1227             return false;
1228         m = InstanceKlass::cast(k)->find_method(default_methods, _name, _signature);
1229         if (!Dependencies::is_concrete_method(m, NULL))
1230             return false;
1231       }
1232       _found_methods[_num_participants] = m;
1233       // Note:  If add_participant(k) is called,
1234       // the method m will already be memoized for it.
1235       return true;
1236     }
1237   }
1238 
1239   bool is_participant(Klass* k) {
1240     if (k == _participants[0]) {
1241       return true;
1242     } else if (_num_participants <= 1) {
1243       return false;
1244     } else {
1245       return in_list(k, &_participants[1]);
1246     }
1247   }
1248   bool ignore_witness(Klass* witness) {
1249     if (_record_witnesses == 0) {
1250       return false;
1251     } else {
1252       --_record_witnesses;
1253       add_participant(witness);
1254       return true;
1255     }
1256   }
1257   static bool in_list(Klass* x, Klass** list) {
1258     for (int i = 0; ; i++) {
1259       Klass* y = list[i];
1260       if (y == NULL)  break;
1261       if (y == x)  return true;
1262     }
1263     return false;  // not in list
1264   }
1265 
1266  private:
1267   // the actual search method:
1268   Klass* find_witness_anywhere(Klass* context_type,
1269                                  bool participants_hide_witnesses,
1270                                  bool top_level_call = true);
1271   // the spot-checking version:
1272   Klass* find_witness_in(KlassDepChange& changes,
1273                          Klass* context_type,
1274                            bool participants_hide_witnesses);
1275  public:
1276   Klass* find_witness_subtype(Klass* context_type, KlassDepChange* changes = NULL) {
1277     assert(doing_subtype_search(), "must set up a subtype search");
1278     // When looking for unexpected concrete types,
1279     // do not look beneath expected ones.
1280     const bool participants_hide_witnesses = true;
1281     // CX > CC > C' is OK, even if C' is new.
1282     // CX > { CC,  C' } is not OK if C' is new, and C' is the witness.
1283     if (changes != NULL) {
1284       return find_witness_in(*changes, context_type, participants_hide_witnesses);
1285     } else {
1286       return find_witness_anywhere(context_type, participants_hide_witnesses);
1287     }
1288   }
1289   Klass* find_witness_definer(Klass* context_type, KlassDepChange* changes = NULL) {
1290     assert(!doing_subtype_search(), "must set up a method definer search");
1291     // When looking for unexpected concrete methods,
1292     // look beneath expected ones, to see if there are overrides.
1293     const bool participants_hide_witnesses = true;
1294     // CX.m > CC.m > C'.m is not OK, if C'.m is new, and C' is the witness.
1295     if (changes != NULL) {
1296       return find_witness_in(*changes, context_type, !participants_hide_witnesses);
1297     } else {
1298       return find_witness_anywhere(context_type, !participants_hide_witnesses);
1299     }
1300   }
1301 };
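     // Sketch of how the walker is typically driven by the checking code in
     // the remainder of this file (participants come from the dependency's
     // own arguments):
     //   ClassHierarchyWalker wf(conck);                      // subtype mode
     //   Klass* witness = wf.find_witness_subtype(ctxk, changes);
     //   // a non-NULL witness is the class that breaks the dependency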
1302 
1303 #ifndef PRODUCT
1304 static int deps_find_witness_calls = 0;
1305 static int deps_find_witness_steps = 0;
1306 static int deps_find_witness_recursions = 0;
1307 static int deps_find_witness_singles = 0;
1308 static int deps_find_witness_print = 0; // set to -1 to force a final print
1309 static bool count_find_witness_calls() {
1310   if (TraceDependencies || LogCompilation) {
1311     int pcount = deps_find_witness_print + 1;
1312     bool final_stats      = (pcount == 0);
1313     bool initial_call     = (pcount == 1);
1314     bool occasional_print = ((pcount & ((1<<10) - 1)) == 0);
1315     if (pcount < 0)  pcount = 1; // crude overflow protection
1316     deps_find_witness_print = pcount;
1317     if (VerifyDependencies && initial_call) {
1318       tty->print_cr("Warning:  TraceDependencies results may be inflated by VerifyDependencies");
1319     }
1320     if (occasional_print || final_stats) {
1321       // Every now and then dump a little info about dependency searching.
1322       if (xtty != NULL) {
1323        ttyLocker ttyl;
1324        xtty->elem("deps_find_witness calls='%d' steps='%d' recursions='%d' singles='%d'",
1325                    deps_find_witness_calls,
1326                    deps_find_witness_steps,
1327                    deps_find_witness_recursions,
1328                    deps_find_witness_singles);
1329       }
1330       if (final_stats || (TraceDependencies && WizardMode)) {
1331         ttyLocker ttyl;
1332         tty->print_cr("Dependency check (find_witness) "
1333                       "calls=%d, steps=%d (avg=%.1f), recursions=%d, singles=%d",
1334                       deps_find_witness_calls,
1335                       deps_find_witness_steps,
1336                       (double)deps_find_witness_steps / deps_find_witness_calls,
1337                       deps_find_witness_recursions,
1338                       deps_find_witness_singles);
1339       }
1340     }
1341     return true;
1342   }
1343   return false;
1344 }
1345 #else
1346 #define count_find_witness_calls() (0)
1347 #endif //PRODUCT
1348 
1349 
1350 Klass* ClassHierarchyWalker::find_witness_in(KlassDepChange& changes,
1351                                                Klass* context_type,
1352                                                bool participants_hide_witnesses) {
1353   assert(changes.involves_context(context_type), "irrelevant dependency");
1354   Klass* new_type = changes.new_type();
1355 
1356   (void)count_find_witness_calls();
1357   NOT_PRODUCT(deps_find_witness_singles++);
1358 
1359   // Current thread must be in VM (not native mode, as in CI):
1360   assert(must_be_in_vm(), "raw oops here");
1361   // Must not move the class hierarchy during this check:
1362   assert_locked_or_safepoint(Compile_lock);
1363 
1364   int nof_impls = InstanceKlass::cast(context_type)->nof_implementors();
1365   if (nof_impls > 1) {
1366     // Avoid this case: *I.m > { A.m, C }; B.m > C
1367     // %%% Until this is fixed more systematically, bail out.
1368     // See corresponding comment in find_witness_anywhere.
1369     return context_type;
1370   }
1371 
1372   assert(!is_participant(new_type), "only old classes are participants");
1373   if (participants_hide_witnesses) {
1374     // If the new type is a subtype of a participant, we are done.
1375     for (int i = 0; i < num_participants(); i++) {
1376       Klass* part = participant(i);
1377       if (part == NULL)  continue;
1378       assert(changes.involves_context(part) == new_type->is_subtype_of(part),
1379              "correct marking of participants, b/c new_type is unique");
1380       if (changes.involves_context(part)) {
1381         // new guy is protected from this check by previous participant
1382         return NULL;
1383       }
1384     }
1385   }
1386 
1387   if (is_witness(new_type) &&
1388       !ignore_witness(new_type)) {
1389     return new_type;
1390   }
1391 
1392   return NULL;
1393 }
1394 
1395 
1396 // Walk hierarchy under a context type, looking for unexpected types.
1397 // Do not report participant types, and recursively walk beneath
1398 // them only if participants_hide_witnesses is false.
1399 // If top_level_call is false, skip testing the context type,
1400 // because the caller has already considered it.
1401 Klass* ClassHierarchyWalker::find_witness_anywhere(Klass* context_type,
1402                                                      bool participants_hide_witnesses,
1403                                                      bool top_level_call) {
1404   // Current thread must be in VM (not native mode, as in CI):
1405   assert(must_be_in_vm(), "raw oops here");
1406   // Must not move the class hierarchy during this check:
1407   assert_locked_or_safepoint(Compile_lock);
1408 
1409   bool do_counts = count_find_witness_calls();
1410 
1411   // Check the root of the sub-hierarchy first.
1412   if (top_level_call) {
1413     if (do_counts) {
1414       NOT_PRODUCT(deps_find_witness_calls++);
1415       NOT_PRODUCT(deps_find_witness_steps++);
1416     }
1417     if (is_participant(context_type)) {
1418       if (participants_hide_witnesses)  return NULL;
1419       // else fall through to search loop...
1420     } else if (is_witness(context_type) && !ignore_witness(context_type)) {
1421       // The context is an abstract class or interface, to start with.
1422       return context_type;
1423     }
1424   }
1425 
1426   // Now we must check each implementor and each subclass.
1427   // Use a short worklist to avoid blowing the stack.
1428   // Each worklist entry is a *chain* of subklass siblings to process.
1429   const int CHAINMAX = 100;  // >= 1 + InstanceKlass::implementors_limit
1430   Klass* chains[CHAINMAX];
1431   int    chaini = 0;  // index into worklist
1432   Klass* chain;       // scratch variable
1433 #define ADD_SUBCLASS_CHAIN(k)                     {  \
1434     assert(chaini < CHAINMAX, "oob");                \
1435     chain = k->subklass();                           \
1436     if (chain != NULL)  chains[chaini++] = chain;    }
1437 
1438   // Look for non-abstract subclasses.
1439   // (Note:  Interfaces do not have subclasses.)
1440   ADD_SUBCLASS_CHAIN(context_type);
1441 
1442   // If it is an interface, search its direct implementors.
1443   // (Their subclasses are additional indirect implementors.
1444   // See InstanceKlass::add_implementor.)
1445   // (Note:  nof_implementors is always zero for non-interfaces.)
1446   if (top_level_call) {
1447     int nof_impls = InstanceKlass::cast(context_type)->nof_implementors();
1448     if (nof_impls > 1) {
1449       // Avoid this case: *I.m > { A.m, C }; B.m > C
1450       // Here, I.m has 2 concrete implementations, but m appears unique
1451       // as A.m, because the search misses B.m when checking C.
1452       // The inherited method B.m was getting missed by the walker
1453       // when interface 'I' was the starting point.
1454       // %%% Until this is fixed more systematically, bail out.
1455       // (Old CHA had the same limitation.)
1456       return context_type;
1457     }
1458     if (nof_impls > 0) {
1459       Klass* impl = InstanceKlass::cast(context_type)->implementor();
1460       assert(impl != NULL, "just checking");
1461       // If impl is the same as the context_type, then more than one
1462       // implementor has been seen.  No exact info in this case.
1463       if (impl == context_type) {
1464         return context_type;  // report an inexact witness to this sad affair
1465       }
1466       if (do_counts)
1467         { NOT_PRODUCT(deps_find_witness_steps++); }
1468       if (is_participant(impl)) {
1469         if (!participants_hide_witnesses) {
1470           ADD_SUBCLASS_CHAIN(impl);
1471         }
1472       } else if (is_witness(impl) && !ignore_witness(impl)) {
1473         return impl;
1474       } else {
1475         ADD_SUBCLASS_CHAIN(impl);
1476       }
1477     }
1478   }
1479 
1480   // Recursively process each non-trivial sibling chain.
1481   while (chaini > 0) {
1482     Klass* chain = chains[--chaini];
1483     for (Klass* sub = chain; sub != NULL; sub = sub->next_sibling()) {
1484       if (do_counts) { NOT_PRODUCT(deps_find_witness_steps++); }
1485       if (is_participant(sub)) {
1486         if (participants_hide_witnesses)  continue;
1487         // else fall through to process this guy's subclasses
1488       } else if (is_witness(sub) && !ignore_witness(sub)) {
1489         return sub;
1490       }
1491       if (chaini < (VerifyDependencies ? 2 : CHAINMAX)) {
1492         // Fast path.  (Partially disabled if VerifyDependencies.)
1493         ADD_SUBCLASS_CHAIN(sub);
1494       } else {
1495         // Worklist overflow.  Do a recursive call.  Should be rare.
1496         // The recursive call will have its own worklist, of course.
1497         // (Note that sub has already been tested, so that there is
1498         // no need for the recursive call to re-test.  That's handy,
1499         // since the recursive call sees sub as the context_type.)
1500         if (do_counts) { NOT_PRODUCT(deps_find_witness_recursions++); }
1501         Klass* witness = find_witness_anywhere(sub,
1502                                                  participants_hide_witnesses,
1503                                                  /*top_level_call=*/ false);
1504         if (witness != NULL)  return witness;
1505       }
1506     }
1507   }
1508 
1509   // No witness found.  The dependency remains unbroken.
1510   return NULL;
1511 #undef ADD_SUBCLASS_CHAIN
1512 }
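     // Note: the checkers below drive this walker by configuring a
     // ClassHierarchyWalker with the expected participants (types and/or a
     // method) and then calling find_witness_subtype() or
     // find_witness_definer() on the context type.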
1513 
1514 
1515 bool Dependencies::is_concrete_klass(Klass* k) {
1516   if (k->is_abstract())  return false;
1517   // %%% We could treat classes which are concrete but
1518   // have not yet been instantiated as virtually abstract.
1519   // This would require a deoptimization barrier on first instantiation.
1520   //if (k->is_not_instantiated())  return false;
1521   return true;
1522 }
1523 
1524 bool Dependencies::is_concrete_method(Method* m, Klass* k) {
1525   // NULL is not a concrete method,
1526   // statics are irrelevant to virtual call sites,
1527   // abstract methods are not concrete,
1528   // overpass (error) methods are not concrete if k is abstract.
1529   //
1530   // Note: "true" is the conservative answer.  If k == NULL, the overpass
1531   // clause is false, so we return true whenever the answer would
1532   // otherwise depend on that clause.
1533   return !(m == NULL || m->is_static() || m->is_abstract() ||
1534            (m->is_overpass() && k != NULL && k->is_abstract()));
1535 }
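     // Worked reading of the rule above (hypothetical m and k): a static or
     // abstract m is never concrete; an overpass (error) m is non-concrete
     // only when its holder k is known to be abstract; with k == NULL the
     // overpass clause drops out and the conservative answer is "concrete".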
1536 
1537 
1538 Klass* Dependencies::find_finalizable_subclass(Klass* k) {
1539   if (k->is_interface())  return NULL;
1540   if (k->has_finalizer()) return k;
1541   k = k->subklass();
1542   while (k != NULL) {
1543     Klass* result = find_finalizable_subclass(k);
1544     if (result != NULL) return result;
1545     k = k->next_sibling();
1546   }
1547   return NULL;
1548 }
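     // Illustrative example (hypothetical classes): given class A and
     // class B extends A { protected void finalize() { ... } }, the walk
     // above returns B for find_finalizable_subclass(A), so B would be the
     // witness for a no_finalizable_subclasses dependency on A.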
1549 
1550 
1551 bool Dependencies::is_concrete_klass(ciInstanceKlass* k) {
1552   if (k->is_abstract())  return false;
1553   // We could also return false if k does not yet appear to be
1554   // instantiated, if the VM version supports this distinction also.
1555   //if (k->is_not_instantiated())  return false;
1556   return true;
1557 }
1558 
1559 bool Dependencies::has_finalizable_subclass(ciInstanceKlass* k) {
1560   return k->has_finalizable_subclass();
1561 }
1562 
1563 
1564 // Any use of the contents (bytecodes) of a method must be
1565 // marked by an "evol_method" dependency, if those contents
1566 // can change.  (Note: A method is always dependent on itself.)
1567 Klass* Dependencies::check_evol_method(Method* m) {
1568   assert(must_be_in_vm(), "raw oops here");
1569   // Did somebody do a JVMTI RedefineClasses while our backs were turned?
1570   // Or is there now a breakpoint?
1571   // (Assumes compiled code cannot handle bkpts; change if UseFastBreakpoints.)
1572   if (m->is_old()
1573       || m->number_of_breakpoints() > 0) {
1574     return m->method_holder();
1575   } else {
1576     return NULL;
1577   }
1578 }
1579 
1580 // This is a strong assertion:  It is that the given type
1581 // has no subtypes whatever.  It is most useful for
1582 // optimizing checks on reflected types or on array types.
1583 // (Checks on types which are derived from real instances
1584 // can be optimized more strongly than this, because we
1585 // know that the checked type comes from a concrete type,
1586 // and therefore we can disregard abstract types.)
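     // For example (hypothetical types): a leaf_type assertion on class C is
     // broken as soon as some D extends C is loaded, and one on interface I
     // is broken by I's first implementor; the offending subklass or
     // implementor is returned below as the witness.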
1587 Klass* Dependencies::check_leaf_type(Klass* ctxk) {
1588   assert(must_be_in_vm(), "raw oops here");
1589   assert_locked_or_safepoint(Compile_lock);
1590   InstanceKlass* ctx = InstanceKlass::cast(ctxk);
1591   Klass* sub = ctx->subklass();
1592   if (sub != NULL) {
1593     return sub;
1594   } else if (ctx->nof_implementors() != 0) {
1595     // A leaf interface must be unimplemented; its implementor is a witness.
1596     // (If ctxk is not an interface, nof_implementors is always zero.)
1597     Klass* impl = ctx->implementor();
1598     assert(impl != NULL, "must be set");
1599     return impl;
1600   } else {
1601     return NULL;
1602   }
1603 }
1604 
1605 // Test the assertion that conck is the only concrete subtype* of ctxk.
1606 // The type conck itself is allowed to have further concrete subtypes.
1607 // This allows the compiler to narrow occurrences of ctxk by conck,
1608 // when dealing with the types of actual instances.
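     // For example (hypothetical types): with abstract ctxk A and conck C,
     // a newly loaded concrete B <: A that is not a subtype of C is returned
     // as a witness, while further subtypes of C stay hidden because C is
     // the walker's participant.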
1609 Klass* Dependencies::check_abstract_with_unique_concrete_subtype(Klass* ctxk,
1610                                                                    Klass* conck,
1611                                                                    KlassDepChange* changes) {
1612   ClassHierarchyWalker wf(conck);
1613   return wf.find_witness_subtype(ctxk, changes);
1614 }
1615 
1616 // If a non-concrete class has no concrete subtypes, it is not (yet)
1617 // instantiatable.  This can allow the compiler to make some paths go
1618 // dead, if they are gated by a test of the type.
1619 Klass* Dependencies::check_abstract_with_no_concrete_subtype(Klass* ctxk,
1620                                                                KlassDepChange* changes) {
1621   // Find any concrete subtype, with no participants:
1622   ClassHierarchyWalker wf;
1623   return wf.find_witness_subtype(ctxk, changes);
1624 }
1625 
1626 
1627 // If a concrete class has no concrete subtypes, it can always be
1628 // exactly typed.  This allows the use of a cheaper type test.
1629 Klass* Dependencies::check_concrete_with_no_concrete_subtype(Klass* ctxk,
1630                                                                KlassDepChange* changes) {
1631   // Find any concrete subtype, with only the ctxk as participant:
1632   ClassHierarchyWalker wf(ctxk);
1633   return wf.find_witness_subtype(ctxk, changes);
1634 }
1635 
1636 
1637 // Find the unique concrete proper subtype of ctxk, or NULL if there
1638 // is more than one concrete proper subtype.  If there are no concrete
1639 // proper subtypes, return ctxk itself, whether it is concrete or not.
1640 // The returned subtype is allowed to have further concrete subtypes.
1641 // That is, return CC1 for CX > CC1 > CC2, but NULL for CX > { CC1, CC2 }.
1642 Klass* Dependencies::find_unique_concrete_subtype(Klass* ctxk) {
1643   ClassHierarchyWalker wf(ctxk);   // Ignore ctxk when walking.
1644   wf.record_witnesses(1);          // Record one other witness when walking.
1645   Klass* wit = wf.find_witness_subtype(ctxk);
1646   if (wit != NULL)  return NULL;   // Too many witnesses.
1647   Klass* conck = wf.participant(0);
1648   if (conck == NULL) {
1649 #ifndef PRODUCT
1650     // Make sure the dependency mechanism will pass this discovery:
1651     if (VerifyDependencies) {
1652       // Turn off dependency tracing while actually testing deps.
1653       FlagSetting fs(TraceDependencies, false);
1654       if (!Dependencies::is_concrete_klass(ctxk)) {
1655         guarantee(NULL ==
1656                   (void *)check_abstract_with_no_concrete_subtype(ctxk),
1657                   "verify dep.");
1658       } else {
1659         guarantee(NULL ==
1660                   (void *)check_concrete_with_no_concrete_subtype(ctxk),
1661                   "verify dep.");
1662       }
1663     }
1664 #endif //PRODUCT
1665     return ctxk;                   // Return ctxk as a flag for "no subtypes".
1666   } else {
1667 #ifndef PRODUCT
1668     // Make sure the dependency mechanism will pass this discovery:
1669     if (VerifyDependencies) {
1670       // Turn off dependency tracing while actually testing deps.
1671       FlagSetting fs(TraceDependencies, false);
1672       if (!Dependencies::is_concrete_klass(ctxk)) {
1673         guarantee(NULL == (void *)
1674                   check_abstract_with_unique_concrete_subtype(ctxk, conck),
1675                   "verify dep.");
1676       }
1677     }
1678 #endif //PRODUCT
1679     return conck;
1680   }
1681 }
1682 
1683 // Test the assertion that the k[12] are the only concrete subtypes of ctxk,
1684 // except possibly for further subtypes of k[12] themselves.
1685 // The context type must be abstract.  The types k1 and k2 are themselves
1686 // allowed to have further concrete subtypes.
1687 Klass* Dependencies::check_abstract_with_exclusive_concrete_subtypes(
1688                                                 Klass* ctxk,
1689                                                 Klass* k1,
1690                                                 Klass* k2,
1691                                                 KlassDepChange* changes) {
1692   ClassHierarchyWalker wf;
1693   wf.add_participant(k1);
1694   wf.add_participant(k2);
1695   return wf.find_witness_subtype(ctxk, changes);
1696 }
1697 
1698 // Search ctxk for concrete implementations.  If there are klen or fewer,
1699 // pack them into the given array and return the number.
1700 // Otherwise, return -1, meaning the given array would overflow.
1701 // (Note that a return of 0 means there are exactly no concrete subtypes.)
1702 // In this search, if ctxk is concrete, it will be reported alone.
1703 // For any type CC reported, no proper subtypes of CC will be reported.
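     // Hedged usage sketch (caller-chosen names), matching the verification
     // cases below which cover up to two subtypes:
     //   Klass* karray[2];
     //   int n = Dependencies::find_exclusive_concrete_subtypes(ctxk, 2, karray);
     //   // n == -1: too many concrete subtypes for an exclusivity assertion;
     //   // n == 0..2: karray[0..n-1] are the only concrete subtypes of ctxk.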
1704 int Dependencies::find_exclusive_concrete_subtypes(Klass* ctxk,
1705                                                    int klen,
1706                                                    Klass* karray[]) {
1707   ClassHierarchyWalker wf;
1708   wf.record_witnesses(klen);
1709   Klass* wit = wf.find_witness_subtype(ctxk);
1710   if (wit != NULL)  return -1;  // Too many witnesses.
1711   int num = wf.num_participants();
1712   assert(num <= klen, "oob");
1713   // Pack the result array with the good news.
1714   for (int i = 0; i < num; i++)
1715     karray[i] = wf.participant(i);
1716 #ifndef PRODUCT
1717   // Make sure the dependency mechanism will pass this discovery:
1718   if (VerifyDependencies) {
1719     // Turn off dependency tracing while actually testing deps.
1720     FlagSetting fs(TraceDependencies, false);
1721     switch (Dependencies::is_concrete_klass(ctxk)? -1: num) {
1722     case -1: // ctxk was itself concrete
1723       guarantee(num == 1 && karray[0] == ctxk, "verify dep.");
1724       break;
1725     case 0:
1726       guarantee(NULL == (void *)check_abstract_with_no_concrete_subtype(ctxk),
1727                 "verify dep.");
1728       break;
1729     case 1:
1730       guarantee(NULL == (void *)
1731                 check_abstract_with_unique_concrete_subtype(ctxk, karray[0]),
1732                 "verify dep.");
1733       break;
1734     case 2:
1735       guarantee(NULL == (void *)
1736                 check_abstract_with_exclusive_concrete_subtypes(ctxk,
1737                                                                 karray[0],
1738                                                                 karray[1]),
1739                 "verify dep.");
1740       break;
1741     default:
1742       ShouldNotReachHere();  // klen > 2 not yet supported
1743     }
1744   }
1745 #endif //PRODUCT
1746   return num;
1747 }
1748 
1749 // If a class (or interface) has a unique concrete method uniqm, return NULL.
1750 // Otherwise, return a class that contains an interfering method.
1751 Klass* Dependencies::check_unique_concrete_method(Klass* ctxk, Method* uniqm,
1752                                                     KlassDepChange* changes) {
1753   // Here is a missing optimization:  If uniqm->is_final(),
1754   // we don't really need to search beneath it for overrides.
1755   // This is probably not important, since we don't use dependencies
1756   // to track final methods.  (They can't be "definalized".)
1757   ClassHierarchyWalker wf(uniqm->method_holder(), uniqm);
1758   return wf.find_witness_definer(ctxk, changes);
1759 }
1760 
1761 // Find the set of all non-abstract methods under ctxk that match m.
1762 // (The method m must be defined or inherited in ctxk.)
1763 // Include m itself in the set, unless it is abstract.
1764 // If this set has exactly one element, return that element.
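     // For example (hypothetical types): if abstract A declares m and exactly
     // one loaded concrete class C overrides it, the result is C's m; a second
     // concrete override, or a conflict between a concrete m and the single
     // override found, yields NULL instead.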
1765 Method* Dependencies::find_unique_concrete_method(Klass* ctxk, Method* m) {
1766   // Return NULL if m is marked old; must have been a redefined method.
1767   if (m->is_old()) {
1768     return NULL;
1769   }
1770   ClassHierarchyWalker wf(m);
1771   assert(wf.check_method_context(ctxk, m), "proper context");
1772   wf.record_witnesses(1);
1773   Klass* wit = wf.find_witness_definer(ctxk);
1774   if (wit != NULL)  return NULL;  // Too many witnesses.
1775   Method* fm = wf.found_method(0);  // Will be NULL if num_parts == 0.
1776   if (Dependencies::is_concrete_method(m, ctxk)) {
1777     if (fm == NULL) {
1778       // It turns out that m was always the only implementation.
1779       fm = m;
1780     } else if (fm != m) {
1781       // Two conflicting implementations after all.
1782       // (This can happen if m is inherited into ctxk and fm overrides it.)
1783       return NULL;
1784     }
1785   }
1786 #ifndef PRODUCT
1787   // Make sure the dependency mechanism will pass this discovery:
1788   if (VerifyDependencies && fm != NULL) {
1789     guarantee(NULL == (void *)check_unique_concrete_method(ctxk, fm),
1790               "verify dep.");
1791   }
1792 #endif //PRODUCT
1793   return fm;
1794 }
1795 
1796 Klass* Dependencies::check_exclusive_concrete_methods(Klass* ctxk,
1797                                                         Method* m1,
1798                                                         Method* m2,
1799                                                         KlassDepChange* changes) {
1800   ClassHierarchyWalker wf(m1);
1801   wf.add_participant(m1->method_holder());
1802   wf.add_participant(m2->method_holder());
1803   return wf.find_witness_definer(ctxk, changes);
1804 }
1805 
1806 Klass* Dependencies::check_has_no_finalizable_subclasses(Klass* ctxk, KlassDepChange* changes) {
1807   Klass* search_at = ctxk;
1808   if (changes != NULL)
1809     search_at = changes->new_type(); // just look at the new bit
1810   return find_finalizable_subclass(search_at);
1811 }
1812 
1813 Klass* Dependencies::check_call_site_target_value(oop call_site, oop method_handle, CallSiteDepChange* changes) {
1814   assert(!oopDesc::is_null(call_site), "sanity");
1815   assert(!oopDesc::is_null(method_handle), "sanity");
1816   assert(call_site->is_a(SystemDictionary::CallSite_klass()),     "sanity");
1817 
1818   if (changes == NULL) {
1819     // Validate all CallSites
1820     if (java_lang_invoke_CallSite::target(call_site) != method_handle)
1821       return call_site->klass();  // assertion failed
1822   } else {
1823     // Validate the given CallSite
1824     if (call_site == changes->call_site() && java_lang_invoke_CallSite::target(call_site) != changes->method_handle()) {
1825       assert(method_handle != changes->method_handle(), "must be");
1826       return call_site->klass();  // assertion failed
1827     }
1828   }
1829   return NULL;  // assertion still valid
1830 }
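     // Rough example: a compiler records call_site_target_value(cs, mh) after
     // constant-folding a CallSite's target; if cs is later relinked to a
     // different MethodHandle, the check above reports cs->klass() as the
     // witness for the broken assertion.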
1831 
1832 void Dependencies::DepStream::trace_and_log_witness(Klass* witness) {
1833   if (witness != NULL) {
1834     if (TraceDependencies) {
1835       print_dependency(witness, /*verbose=*/ true);
1836     }
1837     // The following is a no-op unless logging is enabled:
1838     log_dependency(witness);
1839   }
1840 }
1841 
1842 
1843 Klass* Dependencies::DepStream::check_klass_dependency(KlassDepChange* changes) {
1844   assert_locked_or_safepoint(Compile_lock);
1845   Dependencies::check_valid_dependency_type(type());
1846 
1847   Klass* witness = NULL;
1848   switch (type()) {
1849   case evol_method:
1850     witness = check_evol_method(method_argument(0));
1851     break;
1852   case leaf_type:
1853     witness = check_leaf_type(context_type());
1854     break;
1855   case abstract_with_unique_concrete_subtype:
1856     witness = check_abstract_with_unique_concrete_subtype(context_type(), type_argument(1), changes);
1857     break;
1858   case abstract_with_no_concrete_subtype:
1859     witness = check_abstract_with_no_concrete_subtype(context_type(), changes);
1860     break;
1861   case concrete_with_no_concrete_subtype:
1862     witness = check_concrete_with_no_concrete_subtype(context_type(), changes);
1863     break;
1864   case unique_concrete_method:
1865     witness = check_unique_concrete_method(context_type(), method_argument(1), changes);
1866     break;
1867   case abstract_with_exclusive_concrete_subtypes_2:
1868     witness = check_abstract_with_exclusive_concrete_subtypes(context_type(), type_argument(1), type_argument(2), changes);
1869     break;
1870   case exclusive_concrete_methods_2:
1871     witness = check_exclusive_concrete_methods(context_type(), method_argument(1), method_argument(2), changes);
1872     break;
1873   case no_finalizable_subclasses:
1874     witness = check_has_no_finalizable_subclasses(context_type(), changes);
1875     break;
1876   default:
1877     witness = NULL;
1878     break;
1879   }
1880   trace_and_log_witness(witness);
1881   return witness;
1882 }
1883 
1884 
1885 Klass* Dependencies::DepStream::check_call_site_dependency(CallSiteDepChange* changes) {
1886   assert_locked_or_safepoint(Compile_lock);
1887   Dependencies::check_valid_dependency_type(type());
1888 
1889   Klass* witness = NULL;
1890   switch (type()) {
1891   case call_site_target_value:
1892     witness = check_call_site_target_value(argument_oop(0), argument_oop(1), changes);
1893     break;
1894   default:
1895     witness = NULL;
1896     break;
1897   }
1898   trace_and_log_witness(witness);
1899   return witness;
1900 }
1901 
1902 
1903 Klass* Dependencies::DepStream::spot_check_dependency_at(DepChange& changes) {
1904   // Handle klass dependency
1905   if (changes.is_klass_change() && changes.as_klass_change()->involves_context(context_type()))
1906     return check_klass_dependency(changes.as_klass_change());
1907 
1908   // Handle CallSite dependency
1909   if (changes.is_call_site_change())
1910     return check_call_site_dependency(changes.as_call_site_change());
1911 
1912   // irrelevant dependency; skip it
1913   return NULL;
1914 }
1915 
1916 
1917 void DepChange::print() {
1918   int nsup = 0, nint = 0;
1919   for (ContextStream str(*this); str.next(); ) {
1920     Klass* k = str.klass();
1921     switch (str.change_type()) {
1922     case Change_new_type:
1923       tty->print_cr("  dependee = %s", k->external_name());
1924       break;
1925     case Change_new_sub:
1926       if (!WizardMode) {
1927         ++nsup;
1928       } else {
1929         tty->print_cr("  context super = %s", k->external_name());
1930       }
1931       break;
1932     case Change_new_impl:
1933       if (!WizardMode) {
1934         ++nint;
1935       } else {
1936         tty->print_cr("  context interface = %s", k->external_name());
1937       }
1938       break;
1939     default:
1940       break;
1941     }
1942   }
1943   if (nsup + nint != 0) {
1944     tty->print_cr("  context supers = %d, interfaces = %d", nsup, nint);
1945   }
1946 }
1947 
1948 void DepChange::ContextStream::start() {
1949   Klass* new_type = _changes.is_klass_change() ? _changes.as_klass_change()->new_type() : (Klass*) NULL;
1950   _change_type = (new_type == NULL ? NO_CHANGE : Start_Klass);
1951   _klass = new_type;
1952   _ti_base = NULL;
1953   _ti_index = 0;
1954   _ti_limit = 0;
1955 }
1956 
1957 bool DepChange::ContextStream::next() {
1958   switch (_change_type) {
1959   case Start_Klass:             // initial state; _klass is the new type
1960     _ti_base = InstanceKlass::cast(_klass)->transitive_interfaces();
1961     _ti_index = 0;
1962     _change_type = Change_new_type;
1963     return true;
1964   case Change_new_type:
1965     // fall through:
1966     _change_type = Change_new_sub;
1967   case Change_new_sub:
1968     // 6598190: brackets workaround Sun Studio C++ compiler bug 6629277
1969     {
1970       _klass = _klass->super();
1971       if (_klass != NULL) {
1972         return true;
1973       }
1974     }
1975     // else set up _ti_limit and fall through:
1976     _ti_limit = (_ti_base == NULL) ? 0 : _ti_base->length();
1977     _change_type = Change_new_impl;
1978   case Change_new_impl:
1979     if (_ti_index < _ti_limit) {
1980       _klass = _ti_base->at(_ti_index++);
1981       return true;
1982     }
1983     // fall through:
1984     _change_type = NO_CHANGE;  // iterator is exhausted
1985   case NO_CHANGE:
1986     break;
1987   default:
1988     ShouldNotReachHere();
1989   }
1990   return false;
1991 }
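     // Net iteration order, as implemented above: the new type itself
     // (Change_new_type), then each of its superclasses walking up via
     // super() (Change_new_sub), then each transitive interface of the new
     // type (Change_new_impl).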
1992 
1993 void KlassDepChange::initialize() {
1994   // entire transaction must be under this lock:
1995   assert_lock_strong(Compile_lock);
1996 
1997   // Mark the dependee and all of its superclasses.
1998   // Mark its transitive interfaces as well.
1999   for (ContextStream str(*this); str.next(); ) {
2000     Klass* d = str.klass();
2001     assert(!InstanceKlass::cast(d)->is_marked_dependent(), "checking");
2002     InstanceKlass::cast(d)->set_is_marked_dependent(true);
2003   }
2004 }
2005 
2006 KlassDepChange::~KlassDepChange() {
2007   // Unmark the dependee and all of its superclasses.
2008   // Unmark its transitive interfaces as well.
2009   for (ContextStream str(*this); str.next(); ) {
2010     Klass* d = str.klass();
2011     InstanceKlass::cast(d)->set_is_marked_dependent(false);
2012   }
2013 }
2014 
2015 bool KlassDepChange::involves_context(Klass* k) {
2016   if (k == NULL || !k->is_instance_klass()) {
2017     return false;
2018   }
2019   InstanceKlass* ik = InstanceKlass::cast(k);
2020   bool is_contained = ik->is_marked_dependent();
2021   assert(is_contained == new_type()->is_subtype_of(k),
2022          "correct marking of potential context types");
2023   return is_contained;
2024 }
2025 
2026 #ifndef PRODUCT
2027 void Dependencies::print_statistics() {
2028   if (deps_find_witness_print != 0) {
2029     // Call one final time, to flush out the data.
2030     deps_find_witness_print = -1;
2031     count_find_witness_calls();
2032   }
2033 }
2034 #endif
2035 
2036 CallSiteDepChange::CallSiteDepChange(Handle call_site, Handle method_handle) :
2037   _call_site(call_site),
2038   _method_handle(method_handle) {
2039   assert(_call_site()->is_a(SystemDictionary::CallSite_klass()), "must be");
2040   assert(_method_handle.is_null() || _method_handle()->is_a(SystemDictionary::MethodHandle_klass()), "must be");
2041 }