1 /*
   2  * Copyright (c) 2005, 2018, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "ci/ciArrayKlass.hpp"
  27 #include "ci/ciEnv.hpp"
  28 #include "ci/ciKlass.hpp"
  29 #include "ci/ciMethod.hpp"
  30 #include "classfile/javaClasses.inline.hpp"
  31 #include "code/dependencies.hpp"
  32 #include "compiler/compileLog.hpp"
  33 #include "compiler/compileBroker.hpp"
  34 #include "compiler/compileTask.hpp"
  35 #include "memory/resourceArea.hpp"
  36 #include "oops/oop.inline.hpp"
  37 #include "oops/objArrayKlass.hpp"
  38 #include "runtime/handles.hpp"
  39 #include "runtime/handles.inline.hpp"
  40 #include "runtime/jniHandles.inline.hpp"
  41 #include "runtime/thread.inline.hpp"
  42 #include "utilities/copy.hpp"
  43 
  44 
  45 #ifdef ASSERT
  46 static bool must_be_in_vm() {
  47   Thread* thread = Thread::current();
  48   if (thread->is_Java_thread())
  49     return ((JavaThread*)thread)->thread_state() == _thread_in_vm;
  50   else
  51     return true;  //something like this: thread->is_VM_thread();
  52 }
  53 #endif //ASSERT
  54 
  55 void Dependencies::initialize(ciEnv* env) {
  56   Arena* arena = env->arena();
  57   _oop_recorder = env->oop_recorder();
  58   _log = env->log();
  59   _dep_seen = new(arena) GrowableArray<int>(arena, 500, 0, 0);
  60 #if INCLUDE_JVMCI
  61   _using_dep_values = false;
  62 #endif
  63   DEBUG_ONLY(_deps[end_marker] = NULL);
  64   for (int i = (int)FIRST_TYPE; i < (int)TYPE_LIMIT; i++) {
  65     _deps[i] = new(arena) GrowableArray<ciBaseObject*>(arena, 10, 0, 0);
  66   }
  67   _content_bytes = NULL;
  68   _size_in_bytes = (size_t)-1;
  69 
  70   assert(TYPE_LIMIT <= (1<<LG2_TYPE_LIMIT), "sanity");
  71 }
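
     // The recorder keeps one growable bucket per DepType (_deps[dept]); each
     // assert_common_N call below appends the N arguments of one dependency,
     // flattened, to the bucket for its type.  note_dep_seen() answers "has this
     // argument already been recorded under this type?", so the linear redundancy
     // scans in assert_common_* run only for likely duplicates.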
  72 
  73 void Dependencies::assert_evol_method(ciMethod* m) {
  74   assert_common_1(evol_method, m);
  75 }
  76 
  77 void Dependencies::assert_leaf_type(ciKlass* ctxk) {
  78   if (ctxk->is_array_klass()) {
  79     // As a special case, support this assertion on an array type,
  80     // which reduces to an assertion on its element type.
  81     // Note that this cannot be done with assertions that
  82     // relate to concreteness or abstractness.
  83     ciType* elemt = ctxk->as_array_klass()->base_element_type();
  84     if (!elemt->is_instance_klass())  return;   // Ex:  int[][]
  85     ctxk = elemt->as_instance_klass();
  86     //if (ctxk->is_final())  return;            // Ex:  String[][]
  87   }
  88   check_ctxk(ctxk);
  89   assert_common_1(leaf_type, ctxk);
  90 }
  91 
  92 void Dependencies::assert_abstract_with_unique_concrete_subtype(ciKlass* ctxk, ciKlass* conck) {
  93   check_ctxk_abstract(ctxk);
  94   assert_common_2(abstract_with_unique_concrete_subtype, ctxk, conck);
  95 }
  96 
  97 void Dependencies::assert_abstract_with_no_concrete_subtype(ciKlass* ctxk) {
  98   check_ctxk_abstract(ctxk);
  99   assert_common_1(abstract_with_no_concrete_subtype, ctxk);
 100 }
 101 
 102 void Dependencies::assert_concrete_with_no_concrete_subtype(ciKlass* ctxk) {
 103   check_ctxk_concrete(ctxk);
 104   assert_common_1(concrete_with_no_concrete_subtype, ctxk);
 105 }
 106 
 107 void Dependencies::assert_unique_concrete_method(ciKlass* ctxk, ciMethod* uniqm) {
 108   check_ctxk(ctxk);
 109   assert_common_2(unique_concrete_method, ctxk, uniqm);
 110 }
 111 
 112 void Dependencies::assert_abstract_with_exclusive_concrete_subtypes(ciKlass* ctxk, ciKlass* k1, ciKlass* k2) {
 113   check_ctxk(ctxk);
 114   assert_common_3(abstract_with_exclusive_concrete_subtypes_2, ctxk, k1, k2);
 115 }
 116 
 117 void Dependencies::assert_exclusive_concrete_methods(ciKlass* ctxk, ciMethod* m1, ciMethod* m2) {
 118   check_ctxk(ctxk);
 119   assert_common_3(exclusive_concrete_methods_2, ctxk, m1, m2);
 120 }
 121 
 122 void Dependencies::assert_has_no_finalizable_subclasses(ciKlass* ctxk) {
 123   check_ctxk(ctxk);
 124   assert_common_1(no_finalizable_subclasses, ctxk);
 125 }
 126 
 127 void Dependencies::assert_call_site_target_value(ciCallSite* call_site, ciMethodHandle* method_handle) {
 128   assert_common_2(call_site_target_value, call_site, method_handle);
 129 }
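
     // Typical use of the assertion API above (an illustrative sketch; the exact
     // call sites live in the compilers): a compiler that devirtualizes a call
     // through the single concrete implementation m found under context klass ctxk
     // records that assumption roughly as
     //     deps->assert_unique_concrete_method(ctxk, m);
     // where 'deps' is the Dependencies recorder of the current compilation.  If a
     // second concrete override of m is loaded later, the dependency is found to
     // be violated and the nmethod is deoptimized.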
 130 
 131 #if INCLUDE_JVMCI
 132 
 133 Dependencies::Dependencies(Arena* arena, OopRecorder* oop_recorder, CompileLog* log) {
 134   _oop_recorder = oop_recorder;
 135   _log = log;
 136   _dep_seen = new(arena) GrowableArray<int>(arena, 500, 0, 0);
 137   _using_dep_values = true;
 138   DEBUG_ONLY(_dep_values[end_marker] = NULL);
 139   for (int i = (int)FIRST_TYPE; i < (int)TYPE_LIMIT; i++) {
 140     _dep_values[i] = new(arena) GrowableArray<DepValue>(arena, 10, 0, DepValue());
 141   }
 142   _content_bytes = NULL;
 143   _size_in_bytes = (size_t)-1;
 144 
 145   assert(TYPE_LIMIT <= (1<<LG2_TYPE_LIMIT), "sanity");
 146 }
 147 
 148 void Dependencies::assert_evol_method(Method* m) {
 149   assert_common_1(evol_method, DepValue(_oop_recorder, m));
 150 }
 151 
 152 void Dependencies::assert_has_no_finalizable_subclasses(Klass* ctxk) {
 153   check_ctxk(ctxk);
 154   assert_common_1(no_finalizable_subclasses, DepValue(_oop_recorder, ctxk));
 155 }
 156 
 157 void Dependencies::assert_leaf_type(Klass* ctxk) {
 158   if (ctxk->is_array_klass()) {
 159     // As a special case, support this assertion on an array type,
 160     // which reduces to an assertion on its element type.
 161     // Note that this cannot be done with assertions that
 162     // relate to concreteness or abstractness.
 163     BasicType elemt = ArrayKlass::cast(ctxk)->element_type();
 164     if (is_java_primitive(elemt))  return;   // Ex:  int[][]
 165     ctxk = ObjArrayKlass::cast(ctxk)->bottom_klass();
 166     //if (ctxk->is_final())  return;            // Ex:  String[][]
 167   }
 168   check_ctxk(ctxk);
 169   assert_common_1(leaf_type, DepValue(_oop_recorder, ctxk));
 170 }
 171 
 172 void Dependencies::assert_abstract_with_unique_concrete_subtype(Klass* ctxk, Klass* conck) {
 173   check_ctxk_abstract(ctxk);
 174   DepValue ctxk_dv(_oop_recorder, ctxk);
 175   DepValue conck_dv(_oop_recorder, conck, &ctxk_dv);
 176   assert_common_2(abstract_with_unique_concrete_subtype, ctxk_dv, conck_dv);
 177 }
 178 
 179 void Dependencies::assert_unique_concrete_method(Klass* ctxk, Method* uniqm) {
 180   check_ctxk(ctxk);
 181   assert_common_2(unique_concrete_method, DepValue(_oop_recorder, ctxk), DepValue(_oop_recorder, uniqm));
 182 }
 183 
 184 void Dependencies::assert_call_site_target_value(oop call_site, oop method_handle) {
 185   assert_common_2(call_site_target_value, DepValue(_oop_recorder, JNIHandles::make_local(call_site)), DepValue(_oop_recorder, JNIHandles::make_local(method_handle)));
 186 }
 187 
 188 #endif // INCLUDE_JVMCI
 189 
 190 
 191 // Helper function.  If we are adding a new dep. under ctxk2,
 192 // try to find an old dep. under a broader ctxk1.  If there is
 193 // one, merge the two: keep whichever context class is broader instead of appending a new entry.
 194 bool Dependencies::maybe_merge_ctxk(GrowableArray<ciBaseObject*>* deps,
 195                                     int ctxk_i, ciKlass* ctxk2) {
 196   ciKlass* ctxk1 = deps->at(ctxk_i)->as_metadata()->as_klass();
 197   if (ctxk2->is_subtype_of(ctxk1)) {
 198     return true;  // success, and no need to change
 199   } else if (ctxk1->is_subtype_of(ctxk2)) {
 200     // new context class fully subsumes previous one
 201     deps->at_put(ctxk_i, ctxk2);
 202     return true;
 203   } else {
 204     return false;
 205   }
 206 }
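
     // For example, if unique_concrete_method(java.util.AbstractList, m) is already
     // recorded and the same method m is then asserted under java.util.ArrayList,
     // the new context is a subtype of the stored one and nothing is appended;
     // asserting under a broader context instead widens the stored context class
     // in place.  (The class names here are only illustrative.)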
 207 
 208 void Dependencies::assert_common_1(DepType dept, ciBaseObject* x) {
 209   assert(dep_args(dept) == 1, "sanity");
 210   log_dependency(dept, x);
 211   GrowableArray<ciBaseObject*>* deps = _deps[dept];
 212 
 213   // see if the same (or a similar) dep is already recorded
 214   if (note_dep_seen(dept, x)) {
 215     assert(deps->find(x) >= 0, "sanity");
 216   } else {
 217     deps->append(x);
 218   }
 219 }
 220 
 221 void Dependencies::assert_common_2(DepType dept,
 222                                    ciBaseObject* x0, ciBaseObject* x1) {
 223   assert(dep_args(dept) == 2, "sanity");
 224   log_dependency(dept, x0, x1);
 225   GrowableArray<ciBaseObject*>* deps = _deps[dept];
 226 
 227   // see if the same (or a similar) dep is already recorded
 228   bool has_ctxk = has_explicit_context_arg(dept);
 229   if (has_ctxk) {
 230     assert(dep_context_arg(dept) == 0, "sanity");
 231     if (note_dep_seen(dept, x1)) {
 232       // look in this bucket for redundant assertions
 233       const int stride = 2;
 234       for (int i = deps->length(); (i -= stride) >= 0; ) {
 235         ciBaseObject* y1 = deps->at(i+1);
 236         if (x1 == y1) {  // same subject; check the context
 237           if (maybe_merge_ctxk(deps, i+0, x0->as_metadata()->as_klass())) {
 238             return;
 239           }
 240         }
 241       }
 242     }
 243   } else {
 244     if (note_dep_seen(dept, x0) && note_dep_seen(dept, x1)) {
 245       // look in this bucket for redundant assertions
 246       const int stride = 2;
 247       for (int i = deps->length(); (i -= stride) >= 0; ) {
 248         ciBaseObject* y0 = deps->at(i+0);
 249         ciBaseObject* y1 = deps->at(i+1);
 250         if (x0 == y0 && x1 == y1) {
 251           return;
 252         }
 253       }
 254     }
 255   }
 256 
 257   // append the assertion in the correct bucket:
 258   deps->append(x0);
 259   deps->append(x1);
 260 }
 261 
 262 void Dependencies::assert_common_3(DepType dept,
 263                                    ciKlass* ctxk, ciBaseObject* x, ciBaseObject* x2) {
 264   assert(dep_context_arg(dept) == 0, "sanity");
 265   assert(dep_args(dept) == 3, "sanity");
 266   log_dependency(dept, ctxk, x, x2);
 267   GrowableArray<ciBaseObject*>* deps = _deps[dept];
 268 
 269   // try to normalize an unordered pair:
 270   bool swap = false;
 271   switch (dept) {
 272   case abstract_with_exclusive_concrete_subtypes_2:
 273     swap = (x->ident() > x2->ident() && x->as_metadata()->as_klass() != ctxk);
 274     break;
 275   case exclusive_concrete_methods_2:
 276     swap = (x->ident() > x2->ident() && x->as_metadata()->as_method()->holder() != ctxk);
 277     break;
 278   default:
 279     break;
 280   }
 281   if (swap) { ciBaseObject* t = x; x = x2; x2 = t; }
 282 
 283   // see if the same (or a similar) dep is already recorded
 284   if (note_dep_seen(dept, x) && note_dep_seen(dept, x2)) {
 285     // look in this bucket for redundant assertions
 286     const int stride = 3;
 287     for (int i = deps->length(); (i -= stride) >= 0; ) {
 288       ciBaseObject* y  = deps->at(i+1);
 289       ciBaseObject* y2 = deps->at(i+2);
 290       if (x == y && x2 == y2) {  // same subjects; check the context
 291         if (maybe_merge_ctxk(deps, i+0, ctxk)) {
 292           return;
 293         }
 294       }
 295     }
 296   }
 297   // append the assertion in the correct bucket:
 298   deps->append(ctxk);
 299   deps->append(x);
 300   deps->append(x2);
 301 }
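
     // The swap above gives each unordered (x, x2) pair a canonical order by ci
     // ident, except that an argument which itself encodes the context (the klass
     // equal to ctxk, or the method whose holder is ctxk) is left in front so the
     // default-context compression in encode_content_bytes() still applies.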
 302 
 303 #if INCLUDE_JVMCI
 304 bool Dependencies::maybe_merge_ctxk(GrowableArray<DepValue>* deps,
 305                                     int ctxk_i, DepValue ctxk2_dv) {
 306   Klass* ctxk1 = deps->at(ctxk_i).as_klass(_oop_recorder);
 307   Klass* ctxk2 = ctxk2_dv.as_klass(_oop_recorder);
 308   if (ctxk2->is_subtype_of(ctxk1)) {
 309     return true;  // success, and no need to change
 310   } else if (ctxk1->is_subtype_of(ctxk2)) {
 311     // new context class fully subsumes previous one
 312     deps->at_put(ctxk_i, ctxk2_dv);
 313     return true;
 314   } else {
 315     return false;
 316   }
 317 }
 318 
 319 void Dependencies::assert_common_1(DepType dept, DepValue x) {
 320   assert(dep_args(dept) == 1, "sanity");
 321   //log_dependency(dept, x);
 322   GrowableArray<DepValue>* deps = _dep_values[dept];
 323 
 324   // see if the same (or a similar) dep is already recorded
 325   if (note_dep_seen(dept, x)) {
 326     assert(deps->find(x) >= 0, "sanity");
 327   } else {
 328     deps->append(x);
 329   }
 330 }
 331 
 332 void Dependencies::assert_common_2(DepType dept,
 333                                    DepValue x0, DepValue x1) {
 334   assert(dep_args(dept) == 2, "sanity");
 335   //log_dependency(dept, x0, x1);
 336   GrowableArray<DepValue>* deps = _dep_values[dept];
 337 
 338   // see if the same (or a similar) dep is already recorded
 339   bool has_ctxk = has_explicit_context_arg(dept);
 340   if (has_ctxk) {
 341     assert(dep_context_arg(dept) == 0, "sanity");
 342     if (note_dep_seen(dept, x1)) {
 343       // look in this bucket for redundant assertions
 344       const int stride = 2;
 345       for (int i = deps->length(); (i -= stride) >= 0; ) {
 346         DepValue y1 = deps->at(i+1);
 347         if (x1 == y1) {  // same subject; check the context
 348           if (maybe_merge_ctxk(deps, i+0, x0)) {
 349             return;
 350           }
 351         }
 352       }
 353     }
 354   } else {
 355     if (note_dep_seen(dept, x0) && note_dep_seen(dept, x1)) {
 356       // look in this bucket for redundant assertions
 357       const int stride = 2;
 358       for (int i = deps->length(); (i -= stride) >= 0; ) {
 359         DepValue y0 = deps->at(i+0);
 360         DepValue y1 = deps->at(i+1);
 361         if (x0 == y0 && x1 == y1) {
 362           return;
 363         }
 364       }
 365     }
 366   }
 367 
 368   // append the assertion in the correct bucket:
 369   deps->append(x0);
 370   deps->append(x1);
 371 }
 372 #endif // INCLUDE_JVMCI
 373 
 374 /// Support for encoding dependencies into an nmethod:
 375 
 376 void Dependencies::copy_to(nmethod* nm) {
 377   address beg = nm->dependencies_begin();
 378   address end = nm->dependencies_end();
 379   guarantee(end - beg >= (ptrdiff_t) size_in_bytes(), "bad sizing");
 380   Copy::disjoint_words((HeapWord*) content_bytes(),
 381                        (HeapWord*) beg,
 382                        size_in_bytes() / sizeof(HeapWord));
 383   assert(size_in_bytes() % sizeof(HeapWord) == 0, "copy by words");
 384 }
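
     // Note: the caller must have reserved at least size_in_bytes() in the nmethod
     // (checked by the guarantee above), and encode_content_bytes() pads the stream
     // with end_marker bytes to a HeapWord boundary, which is what makes the
     // word-sized disjoint copy safe.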
 385 
 386 static int sort_dep(ciBaseObject** p1, ciBaseObject** p2, int narg) {
 387   for (int i = 0; i < narg; i++) {
 388     int diff = p1[i]->ident() - p2[i]->ident();
 389     if (diff != 0)  return diff;
 390   }
 391   return 0;
 392 }
 393 static int sort_dep_arg_1(ciBaseObject** p1, ciBaseObject** p2)
 394 { return sort_dep(p1, p2, 1); }
 395 static int sort_dep_arg_2(ciBaseObject** p1, ciBaseObject** p2)
 396 { return sort_dep(p1, p2, 2); }
 397 static int sort_dep_arg_3(ciBaseObject** p1, ciBaseObject** p2)
 398 { return sort_dep(p1, p2, 3); }
 399 
 400 #if INCLUDE_JVMCI
 401 // metadata deps are sorted before object deps
 402 static int sort_dep_value(Dependencies::DepValue* p1, Dependencies::DepValue* p2, int narg) {
 403   for (int i = 0; i < narg; i++) {
 404     int diff = p1[i].sort_key() - p2[i].sort_key();
 405     if (diff != 0)  return diff;
 406   }
 407   return 0;
 408 }
 409 static int sort_dep_value_arg_1(Dependencies::DepValue* p1, Dependencies::DepValue* p2)
 410 { return sort_dep_value(p1, p2, 1); }
 411 static int sort_dep_value_arg_2(Dependencies::DepValue* p1, Dependencies::DepValue* p2)
 412 { return sort_dep_value(p1, p2, 2); }
 413 static int sort_dep_value_arg_3(Dependencies::DepValue* p1, Dependencies::DepValue* p2)
 414 { return sort_dep_value(p1, p2, 3); }
 415 #endif // INCLUDE_JVMCI
 416 
 417 void Dependencies::sort_all_deps() {
 418 #if INCLUDE_JVMCI
 419   if (_using_dep_values) {
 420     for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
 421       DepType dept = (DepType)deptv;
 422       GrowableArray<DepValue>* deps = _dep_values[dept];
 423       if (deps->length() <= 1)  continue;
 424       switch (dep_args(dept)) {
 425       case 1: deps->sort(sort_dep_value_arg_1, 1); break;
 426       case 2: deps->sort(sort_dep_value_arg_2, 2); break;
 427       case 3: deps->sort(sort_dep_value_arg_3, 3); break;
 428       default: ShouldNotReachHere(); break;
 429       }
 430     }
 431     return;
 432   }
 433 #endif // INCLUDE_JVMCI
 434   for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
 435     DepType dept = (DepType)deptv;
 436     GrowableArray<ciBaseObject*>* deps = _deps[dept];
 437     if (deps->length() <= 1)  continue;
 438     switch (dep_args(dept)) {
 439     case 1: deps->sort(sort_dep_arg_1, 1); break;
 440     case 2: deps->sort(sort_dep_arg_2, 2); break;
 441     case 3: deps->sort(sort_dep_arg_3, 3); break;
 442     default: ShouldNotReachHere(); break;
 443     }
 444   }
 445 }
 446 
 447 size_t Dependencies::estimate_size_in_bytes() {
 448   size_t est_size = 100;
 449 #if INCLUDE_JVMCI
 450   if (_using_dep_values) {
 451     for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
 452       DepType dept = (DepType)deptv;
 453       GrowableArray<DepValue>* deps = _dep_values[dept];
 454       est_size += deps->length() * 2;  // tags and argument(s)
 455     }
 456     return est_size;
 457   }
 458 #endif // INCLUDE_JVMCI
 459   for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
 460     DepType dept = (DepType)deptv;
 461     GrowableArray<ciBaseObject*>* deps = _deps[dept];
 462     est_size += deps->length() * 2;  // tags and argument(s)
 463   }
 464   return est_size;
 465 }
 466 
 467 ciKlass* Dependencies::ctxk_encoded_as_null(DepType dept, ciBaseObject* x) {
 468   switch (dept) {
 469   case abstract_with_exclusive_concrete_subtypes_2:
 470     return x->as_metadata()->as_klass();
 471   case unique_concrete_method:
 472   case exclusive_concrete_methods_2:
 473     return x->as_metadata()->as_method()->holder();
 474   default:
 475     return NULL;  // let NULL be NULL
 476   }
 477 }
 478 
 479 Klass* Dependencies::ctxk_encoded_as_null(DepType dept, Metadata* x) {
 480   assert(must_be_in_vm(), "raw oops here");
 481   switch (dept) {
 482   case abstract_with_exclusive_concrete_subtypes_2:
 483     assert(x->is_klass(), "sanity");
 484     return (Klass*) x;
 485   case unique_concrete_method:
 486   case exclusive_concrete_methods_2:
 487     assert(x->is_method(), "sanity");
 488     return ((Method*)x)->method_holder();
 489   default:
 490     return NULL;  // let NULL be NULL
 491   }
 492 }
 493 
 494 void Dependencies::encode_content_bytes() {
 495   sort_all_deps();
 496 
 497   // cast is safe, no deps can overflow INT_MAX
 498   CompressedWriteStream bytes((int)estimate_size_in_bytes());
 499 
 500 #if INCLUDE_JVMCI
 501   if (_using_dep_values) {
 502     for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
 503       DepType dept = (DepType)deptv;
 504       GrowableArray<DepValue>* deps = _dep_values[dept];
 505       if (deps->length() == 0)  continue;
 506       int stride = dep_args(dept);
 507       int ctxkj  = dep_context_arg(dept);  // -1 if no context arg
 508       assert(stride > 0, "sanity");
 509       for (int i = 0; i < deps->length(); i += stride) {
 510         jbyte code_byte = (jbyte)dept;
 511         int skipj = -1;
 512         if (ctxkj >= 0 && ctxkj+1 < stride) {
 513           Klass*  ctxk = deps->at(i+ctxkj+0).as_klass(_oop_recorder);
 514           DepValue x = deps->at(i+ctxkj+1);  // following argument
 515           if (ctxk == ctxk_encoded_as_null(dept, x.as_metadata(_oop_recorder))) {
 516             skipj = ctxkj;  // we win:  maybe one less oop to keep track of
 517             code_byte |= default_context_type_bit;
 518           }
 519         }
 520         bytes.write_byte(code_byte);
 521         for (int j = 0; j < stride; j++) {
 522           if (j == skipj)  continue;
 523           DepValue v = deps->at(i+j);
 524           int idx = v.index();
 525           bytes.write_int(idx);
 526         }
 527       }
 528     }
 529   } else {
 530 #endif // INCLUDE_JVMCI
 531   for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
 532     DepType dept = (DepType)deptv;
 533     GrowableArray<ciBaseObject*>* deps = _deps[dept];
 534     if (deps->length() == 0)  continue;
 535     int stride = dep_args(dept);
 536     int ctxkj  = dep_context_arg(dept);  // -1 if no context arg
 537     assert(stride > 0, "sanity");
 538     for (int i = 0; i < deps->length(); i += stride) {
 539       jbyte code_byte = (jbyte)dept;
 540       int skipj = -1;
 541       if (ctxkj >= 0 && ctxkj+1 < stride) {
 542         ciKlass*  ctxk = deps->at(i+ctxkj+0)->as_metadata()->as_klass();
 543         ciBaseObject* x     = deps->at(i+ctxkj+1);  // following argument
 544         if (ctxk == ctxk_encoded_as_null(dept, x)) {
 545           skipj = ctxkj;  // we win:  maybe one less oop to keep track of
 546           code_byte |= default_context_type_bit;
 547         }
 548       }
 549       bytes.write_byte(code_byte);
 550       for (int j = 0; j < stride; j++) {
 551         if (j == skipj)  continue;
 552         ciBaseObject* v = deps->at(i+j);
 553         int idx;
 554         if (v->is_object()) {
 555           idx = _oop_recorder->find_index(v->as_object()->constant_encoding());
 556         } else {
 557           ciMetadata* meta = v->as_metadata();
 558           idx = _oop_recorder->find_index(meta->constant_encoding());
 559         }
 560         bytes.write_int(idx);
 561       }
 562     }
 563   }
 564 #if INCLUDE_JVMCI
 565   }
 566 #endif
 567 
 568   // write a sentinel byte to mark the end
 569   bytes.write_byte(end_marker);
 570 
 571   // round it out to a word boundary
 572   while (bytes.position() % sizeof(HeapWord) != 0) {
 573     bytes.write_byte(end_marker);
 574   }
 575 
 576   // check whether the dept byte encoding really works
 577   assert((jbyte)default_context_type_bit != 0, "byte overflow");
 578 
 579   _content_bytes = bytes.buffer();
 580   _size_in_bytes = bytes.position();
 581 }
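
     // The encoded stream is a sequence of records, one per dependency: a tag byte
     // holding the DepType (possibly OR'ed with default_context_type_bit) followed
     // by compressed ints that index the recorded oops/metadata, terminated by
     // end_marker bytes padded out to a word boundary.  For example (illustrative),
     // a unique_concrete_method(ctxk, m) whose ctxk is exactly m's holder is
     // emitted as one tag byte with the default-context bit set plus a single
     // index for m; the context is reconstructed from the method when decoding.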
 582 
 583 
 584 const char* Dependencies::_dep_name[TYPE_LIMIT] = {
 585   "end_marker",
 586   "evol_method",
 587   "leaf_type",
 588   "abstract_with_unique_concrete_subtype",
 589   "abstract_with_no_concrete_subtype",
 590   "concrete_with_no_concrete_subtype",
 591   "unique_concrete_method",
 592   "abstract_with_exclusive_concrete_subtypes_2",
 593   "exclusive_concrete_methods_2",
 594   "no_finalizable_subclasses",
 595   "call_site_target_value"
 596 };
 597 
 598 int Dependencies::_dep_args[TYPE_LIMIT] = {
 599   -1,// end_marker
 600   1, // evol_method m
 601   1, // leaf_type ctxk
 602   2, // abstract_with_unique_concrete_subtype ctxk, k
 603   1, // abstract_with_no_concrete_subtype ctxk
 604   1, // concrete_with_no_concrete_subtype ctxk
 605   2, // unique_concrete_method ctxk, m
 606   3, // abstract_with_exclusive_concrete_subtypes_2 ctxk, k1, k2
 607   3, // exclusive_concrete_methods_2 ctxk, m1, m2
 608   1, // no_finalizable_subclasses ctxk
 609   2  // call_site_target_value call_site, method_handle
 610 };
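
     // Both tables above are indexed by DepType and must have exactly TYPE_LIMIT
     // entries, in the same order as the DepType enum in code/dependencies.hpp.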
 611 
 612 const char* Dependencies::dep_name(Dependencies::DepType dept) {
 613   if (!dept_in_mask(dept, all_types))  return "?bad-dep?";
 614   return _dep_name[dept];
 615 }
 616 
 617 int Dependencies::dep_args(Dependencies::DepType dept) {
 618   if (!dept_in_mask(dept, all_types))  return -1;
 619   return _dep_args[dept];
 620 }
 621 
 622 void Dependencies::check_valid_dependency_type(DepType dept) {
 623   guarantee(FIRST_TYPE <= dept && dept < TYPE_LIMIT, "invalid dependency type: %d", (int) dept);
 624 }
 625 
 626 Dependencies::DepType Dependencies::validate_dependencies(CompileTask* task, bool counter_changed, char** failure_detail) {
 627   // First, check non-klass dependencies as we might return early and
 628   // not check klass dependencies if the system dictionary
 629   // modification counter hasn't changed (see below).
 630   for (Dependencies::DepStream deps(this); deps.next(); ) {
 631     if (deps.is_klass_type())  continue;  // skip klass dependencies
 632     Klass* witness = deps.check_dependency();
 633     if (witness != NULL) {
 634       return deps.type();
 635     }
 636   }
 637 
 638   // Klass dependencies must be checked when the system dictionary
 639   // changes.  If logging is enabled all violated dependences will be
 640   // recorded in the log.  In debug mode check dependencies even if
 641   // the system dictionary hasn't changed to verify that no invalid
 642   // dependencies were inserted.  Any violated dependences in this
 643   // case are dumped to the tty.
 644   if (!counter_changed && !trueInDebug) {
 645     return end_marker;
 646   }
 647 
 648   int klass_violations = 0;
 649   DepType result = end_marker;
 650   for (Dependencies::DepStream deps(this); deps.next(); ) {
 651     if (!deps.is_klass_type())  continue;  // skip non-klass dependencies
 652     Klass* witness = deps.check_dependency();
 653     if (witness != NULL) {
 654       if (klass_violations == 0) {
 655         result = deps.type();
 656         if (failure_detail != NULL) {
 657           // Use a fixed size buffer to prevent the string stream from
 658           // resizing in the context of an inner resource mark.
 659           char* buffer = NEW_RESOURCE_ARRAY(char, O_BUFLEN);
 660           stringStream st(buffer, O_BUFLEN);
 661           deps.print_dependency(witness, true, &st);
 662           *failure_detail = st.as_string();
 663         }
 664       }
 665       klass_violations++;
 666       if (!counter_changed) {
 667         // Dependence failed but counter didn't change.  Log a message
 668         // describing what failed and allow the assert at the end to
 669         // trigger.
 670         deps.print_dependency(witness);
 671       } else if (xtty == NULL) {
 672         // If we're not logging then a single violation is sufficient,
 673         // otherwise we want to log all the dependences which were
 674         // violated.
 675         break;
 676       }
 677     }
 678   }
 679 
 680   if (klass_violations != 0) {
 681 #ifdef ASSERT
 682     if (task != NULL && !counter_changed && !PrintCompilation) {
 683       // Print out the compile task that failed
 684       task->print_tty();
 685     }
 686 #endif
 687     assert(counter_changed, "failed dependencies, but counter didn't change");
 688   }
 689   return result;
 690 }
 691 
 692 // for the sake of the compiler log, print out current dependencies:
 693 void Dependencies::log_all_dependencies() {
 694   if (log() == NULL)  return;
 695   ResourceMark rm;
 696   for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
 697     DepType dept = (DepType)deptv;
 698     GrowableArray<ciBaseObject*>* deps = _deps[dept];
 699     int deplen = deps->length();
 700     if (deplen == 0) {
 701       continue;
 702     }
 703     int stride = dep_args(dept);
 704     GrowableArray<ciBaseObject*>* ciargs = new GrowableArray<ciBaseObject*>(stride);
 705     for (int i = 0; i < deps->length(); i += stride) {
 706       for (int j = 0; j < stride; j++) {
 707         // flush out the identities before printing
 708         ciargs->push(deps->at(i+j));
 709       }
 710       write_dependency_to(log(), dept, ciargs);
 711       ciargs->clear();
 712     }
 713     guarantee(deplen == deps->length(), "deps array cannot grow inside nested ResourceMark scope");
 714   }
 715 }
 716 
 717 void Dependencies::write_dependency_to(CompileLog* log,
 718                                        DepType dept,
 719                                        GrowableArray<DepArgument>* args,
 720                                        Klass* witness) {
 721   if (log == NULL) {
 722     return;
 723   }
 724   ResourceMark rm;
 725   ciEnv* env = ciEnv::current();
 726   GrowableArray<ciBaseObject*>* ciargs = new GrowableArray<ciBaseObject*>(args->length());
 727   for (GrowableArrayIterator<DepArgument> it = args->begin(); it != args->end(); ++it) {
 728     DepArgument arg = *it;
 729     if (arg.is_oop()) {
 730       ciargs->push(env->get_object(arg.oop_value()));
 731     } else {
 732       ciargs->push(env->get_metadata(arg.metadata_value()));
 733     }
 734   }
 735   int argslen = ciargs->length();
 736   Dependencies::write_dependency_to(log, dept, ciargs, witness);
 737   guarantee(argslen == ciargs->length(), "ciargs array cannot grow inside nested ResourceMark scope");
 738 }
 739 
 740 void Dependencies::write_dependency_to(CompileLog* log,
 741                                        DepType dept,
 742                                        GrowableArray<ciBaseObject*>* args,
 743                                        Klass* witness) {
 744   if (log == NULL) {
 745     return;
 746   }
 747   ResourceMark rm;
 748   GrowableArray<int>* argids = new GrowableArray<int>(args->length());
 749   for (GrowableArrayIterator<ciBaseObject*> it = args->begin(); it != args->end(); ++it) {
 750     ciBaseObject* obj = *it;
 751     if (obj->is_object()) {
 752       argids->push(log->identify(obj->as_object()));
 753     } else {
 754       argids->push(log->identify(obj->as_metadata()));
 755     }
 756   }
 757   if (witness != NULL) {
 758     log->begin_elem("dependency_failed");
 759   } else {
 760     log->begin_elem("dependency");
 761   }
 762   log->print(" type='%s'", dep_name(dept));
 763   const int ctxkj = dep_context_arg(dept);  // -1 if no context arg
 764   if (ctxkj >= 0 && ctxkj < argids->length()) {
 765     log->print(" ctxk='%d'", argids->at(ctxkj));
 766   }
 767   // write remaining arguments, if any.
 768   for (int j = 0; j < argids->length(); j++) {
 769     if (j == ctxkj)  continue;  // already logged
 770     if (j == 1) {
 771       log->print(  " x='%d'",    argids->at(j));
 772     } else {
 773       log->print(" x%d='%d'", j, argids->at(j));
 774     }
 775   }
 776   if (witness != NULL) {
 777     log->object("witness", witness);
 778     log->stamp();
 779   }
 780   log->end_elem();
 781 }
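
     // The element written above looks roughly like (illustrative):
     //   <dependency type='unique_concrete_method' ctxk='23' x='57'/>
     // where the numeric values are compile-log identifiers assigned by
     // CompileLog::identify(); a failed check is tagged <dependency_failed> and
     // also records the witness and a timestamp.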
 782 
 783 void Dependencies::write_dependency_to(xmlStream* xtty,
 784                                        DepType dept,
 785                                        GrowableArray<DepArgument>* args,
 786                                        Klass* witness) {
 787   if (xtty == NULL) {
 788     return;
 789   }
 790   Thread* thread = Thread::current();
 791   HandleMark rm(thread);
 792   ttyLocker ttyl;
 793   int ctxkj = dep_context_arg(dept);  // -1 if no context arg
 794   if (witness != NULL) {
 795     xtty->begin_elem("dependency_failed");
 796   } else {
 797     xtty->begin_elem("dependency");
 798   }
 799   xtty->print(" type='%s'", dep_name(dept));
 800   if (ctxkj >= 0) {
 801     xtty->object("ctxk", args->at(ctxkj).metadata_value());
 802   }
 803   // write remaining arguments, if any.
 804   for (int j = 0; j < args->length(); j++) {
 805     if (j == ctxkj)  continue;  // already logged
 806     DepArgument arg = args->at(j);
 807     if (j == 1) {
 808       if (arg.is_oop()) {
 809         xtty->object("x", Handle(thread, arg.oop_value()));
 810       } else {
 811         xtty->object("x", arg.metadata_value());
 812       }
 813     } else {
 814       char xn[12]; sprintf(xn, "x%d", j);
 815       if (arg.is_oop()) {
 816         xtty->object(xn, Handle(thread, arg.oop_value()));
 817       } else {
 818         xtty->object(xn, arg.metadata_value());
 819       }
 820     }
 821   }
 822   if (witness != NULL) {
 823     xtty->object("witness", witness);
 824     xtty->stamp();
 825   }
 826   xtty->end_elem();
 827 }
 828 
 829 void Dependencies::print_dependency(DepType dept, GrowableArray<DepArgument>* args,
 830                                     Klass* witness, outputStream* st) {
 831   ResourceMark rm;
 832   ttyLocker ttyl;   // keep the following output all in one block
 833   st->print_cr("%s of type %s",
 834                 (witness == NULL)? "Dependency": "Failed dependency",
 835                 dep_name(dept));
 836   // print arguments
 837   int ctxkj = dep_context_arg(dept);  // -1 if no context arg
 838   for (int j = 0; j < args->length(); j++) {
 839     DepArgument arg = args->at(j);
 840     bool put_star = false;
 841     if (arg.is_null())  continue;
 842     const char* what;
 843     if (j == ctxkj) {
 844       assert(arg.is_metadata(), "must be");
 845       what = "context";
 846       put_star = !Dependencies::is_concrete_klass((Klass*)arg.metadata_value());
 847     } else if (arg.is_method()) {
 848       what = "method ";
 849       put_star = !Dependencies::is_concrete_method((Method*)arg.metadata_value(), NULL);
 850     } else if (arg.is_klass()) {
 851       what = "class  ";
 852     } else {
 853       what = "object ";
 854     }
 855     st->print("  %s = %s", what, (put_star? "*": ""));
 856     if (arg.is_klass()) {
 857       st->print("%s", ((Klass*)arg.metadata_value())->external_name());
 858     } else if (arg.is_method()) {
 859       ((Method*)arg.metadata_value())->print_value_on(st);
 860     } else if (arg.is_oop()) {
 861       arg.oop_value()->print_value_on(st);
 862     } else {
 863       ShouldNotReachHere(); // Provide impl for this type.
 864     }
 865 
 866     st->cr();
 867   }
 868   if (witness != NULL) {
 869     bool put_star = !Dependencies::is_concrete_klass(witness);
 870     st->print_cr("  witness = %s%s",
 871                   (put_star? "*": ""),
 872                   witness->external_name());
 873   }
 874 }
 875 
 876 void Dependencies::DepStream::log_dependency(Klass* witness) {
 877   if (_deps == NULL && xtty == NULL)  return;  // fast cutout for runtime
 878   ResourceMark rm;
 879   const int nargs = argument_count();
 880   GrowableArray<DepArgument>* args = new GrowableArray<DepArgument>(nargs);
 881   for (int j = 0; j < nargs; j++) {
 882     if (is_oop_argument(j)) {
 883       args->push(argument_oop(j));
 884     } else {
 885       args->push(argument(j));
 886     }
 887   }
 888   int argslen = args->length();
 889   if (_deps != NULL && _deps->log() != NULL) {
 890     if (ciEnv::current() != NULL) {
 891       Dependencies::write_dependency_to(_deps->log(), type(), args, witness);
 892     } else {
 893       // Treat the CompileLog as an xmlstream instead
 894       Dependencies::write_dependency_to((xmlStream*)_deps->log(), type(), args, witness);
 895     }
 896   } else {
 897     Dependencies::write_dependency_to(xtty, type(), args, witness);
 898   }
 899   guarantee(argslen == args->length(), "args array cannot grow inside nested ResourceMark scope");
 900 }
 901 
 902 void Dependencies::DepStream::print_dependency(Klass* witness, bool verbose, outputStream* st) {
 903   ResourceMark rm;
 904   int nargs = argument_count();
 905   GrowableArray<DepArgument>* args = new GrowableArray<DepArgument>(nargs);
 906   for (int j = 0; j < nargs; j++) {
 907     if (is_oop_argument(j)) {
 908       args->push(argument_oop(j));
 909     } else {
 910       args->push(argument(j));
 911     }
 912   }
 913   int argslen = args->length();
 914   Dependencies::print_dependency(type(), args, witness, st);
 915   if (verbose) {
 916     if (_code != NULL) {
 917       st->print("  code: ");
 918       _code->print_value_on(st);
 919       st->cr();
 920     }
 921   }
 922   guarantee(argslen == args->length(), "args array cannot grow inside nested ResourceMark scope");
 923 }
 924 
 925 
 926 /// Dependency stream support (decodes dependencies from an nmethod):
 927 
 928 #ifdef ASSERT
 929 void Dependencies::DepStream::initial_asserts(size_t byte_limit) {
 930   assert(must_be_in_vm(), "raw oops here");
 931   _byte_limit = byte_limit;
 932   _type       = (DepType)(end_marker-1);  // defeat "already at end" assert
 933   assert((_code!=NULL) + (_deps!=NULL) == 1, "one or t'other");
 934 }
 935 #endif //ASSERT
 936 
 937 bool Dependencies::DepStream::next() {
 938   assert(_type != end_marker, "already at end");
 939   if (_bytes.position() == 0 && _code != NULL
 940       && _code->dependencies_size() == 0) {
 941     // Method has no dependencies at all.
 942     return false;
 943   }
 944   int code_byte = (_bytes.read_byte() & 0xFF);
 945   if (code_byte == end_marker) {
 946     DEBUG_ONLY(_type = end_marker);
 947     return false;
 948   } else {
 949     int ctxk_bit = (code_byte & Dependencies::default_context_type_bit);
 950     code_byte -= ctxk_bit;
 951     DepType dept = (DepType)code_byte;
 952     _type = dept;
 953     Dependencies::check_valid_dependency_type(dept);
 954     int stride = _dep_args[dept];
 955     assert(stride == dep_args(dept), "sanity");
 956     int skipj = -1;
 957     if (ctxk_bit != 0) {
 958       skipj = 0;  // currently the only context argument is at zero
 959       assert(skipj == dep_context_arg(dept), "zero arg always ctxk");
 960     }
 961     for (int j = 0; j < stride; j++) {
 962       _xi[j] = (j == skipj)? 0: _bytes.read_int();
 963     }
 964     DEBUG_ONLY(_xi[stride] = -1);   // help detect overruns
 965     return true;
 966   }
 967 }
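
     // Decoding mirrors encode_content_bytes(): read one tag byte, strip the
     // default_context_type_bit, then read dep_args(dept) compressed ints (one
     // fewer when the bit was set); a compressed context argument is reconstructed
     // lazily in argument() via ctxk_encoded_as_null().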
 968 
 969 inline Metadata* Dependencies::DepStream::recorded_metadata_at(int i) {
 970   Metadata* o = NULL;
 971   if (_code != NULL) {
 972     o = _code->metadata_at(i);
 973   } else {
 974     o = _deps->oop_recorder()->metadata_at(i);
 975   }
 976   return o;
 977 }
 978 
 979 inline oop Dependencies::DepStream::recorded_oop_at(int i) {
 980   return (_code != NULL)
 981          ? _code->oop_at(i)
 982          : JNIHandles::resolve(_deps->oop_recorder()->oop_at(i));
 983 }
 984 
 985 Metadata* Dependencies::DepStream::argument(int i) {
 986   Metadata* result = recorded_metadata_at(argument_index(i));
 987 
 988   if (result == NULL) { // Explicit context argument can be compressed
 989     int ctxkj = dep_context_arg(type());  // -1 if no explicit context arg
 990     if (ctxkj >= 0 && i == ctxkj && ctxkj+1 < argument_count()) {
 991       result = ctxk_encoded_as_null(type(), argument(ctxkj+1));
 992     }
 993   }
 994 
 995   assert(result == NULL || result->is_klass() || result->is_method(), "must be");
 996   return result;
 997 }
 998 
 999 /**
1000  * Returns a unique identifier for each dependency argument.
1001  */
1002 uintptr_t Dependencies::DepStream::get_identifier(int i) {
1003   if (is_oop_argument(i)) {
1004     return (uintptr_t)(oopDesc*)argument_oop(i);
1005   } else {
1006     return (uintptr_t)argument(i);
1007   }
1008 }
1009 
1010 oop Dependencies::DepStream::argument_oop(int i) {
1011   oop result = recorded_oop_at(argument_index(i));
1012   assert(oopDesc::is_oop_or_null(result), "must be");
1013   return result;
1014 }
1015 
1016 Klass* Dependencies::DepStream::context_type() {
1017   assert(must_be_in_vm(), "raw oops here");
1018 
1019   // Most dependencies have an explicit context type argument.
1020   {
1021     int ctxkj = dep_context_arg(type());  // -1 if no explicit context arg
1022     if (ctxkj >= 0) {
1023       Metadata* k = argument(ctxkj);
1024       assert(k != NULL && k->is_klass(), "type check");
1025       return (Klass*)k;
1026     }
1027   }
1028 
1029   // Some dependencies are using the klass of the first object
1030   // argument as implicit context type.
1031   {
1032     int ctxkj = dep_implicit_context_arg(type());
1033     if (ctxkj >= 0) {
1034       Klass* k = argument_oop(ctxkj)->klass();
1035       assert(k != NULL && k->is_klass(), "type check");
1036       return (Klass*) k;
1037     }
1038   }
1039 
1040   // And some dependencies don't have a context type at all,
1041   // e.g. evol_method.
1042   return NULL;
1043 }
1044 
1045 // ----------------- DependencySignature --------------------------------------
1046 bool DependencySignature::equals(DependencySignature const& s1, DependencySignature const& s2) {
1047   if ((s1.type() != s2.type()) || (s1.args_count() != s2.args_count())) {
1048     return false;
1049   }
1050 
1051   for (int i = 0; i < s1.args_count(); i++) {
1052     if (s1.arg(i) != s2.arg(i)) {
1053       return false;
1054     }
1055   }
1056   return true;
1057 }
1058 
1059 /// Checking dependencies:
1060 
1061 // This hierarchy walker inspects subtypes of a given type,
1062 // trying to find a "bad" class which breaks a dependency.
1063 // Such a class is called a "witness" to the broken dependency.
1064 // While searching around, we ignore "participants", which
1065 // are already known to the dependency.
1066 class ClassHierarchyWalker {
1067  public:
1068   enum { PARTICIPANT_LIMIT = 3 };
1069 
1070  private:
1071   // optional method descriptor to check for:
1072   Symbol* _name;
1073   Symbol* _signature;
1074 
1075   // special classes which are not allowed to be witnesses:
1076   Klass*    _participants[PARTICIPANT_LIMIT+1];
1077   int       _num_participants;
1078 
1079   // cache of method lookups
1080   Method* _found_methods[PARTICIPANT_LIMIT+1];
1081 
1082   // if non-zero, tells how many witnesses to convert to participants
1083   int       _record_witnesses;
1084 
1085   void initialize(Klass* participant) {
1086     _record_witnesses = 0;
1087     _participants[0]  = participant;
1088     _found_methods[0] = NULL;
1089     _num_participants = 0;
1090     if (participant != NULL) {
1091       // Terminating NULL.
1092       _participants[1] = NULL;
1093       _found_methods[1] = NULL;
1094       _num_participants = 1;
1095     }
1096   }
1097 
1098   void initialize_from_method(Method* m) {
1099     assert(m != NULL && m->is_method(), "sanity");
1100     _name      = m->name();
1101     _signature = m->signature();
1102   }
1103 
1104  public:
1105   // The walker is initialized to recognize certain methods and/or types
1106   // as friendly participants.
1107   ClassHierarchyWalker(Klass* participant, Method* m) {
1108     initialize_from_method(m);
1109     initialize(participant);
1110   }
1111   ClassHierarchyWalker(Method* m) {
1112     initialize_from_method(m);
1113     initialize(NULL);
1114   }
1115   ClassHierarchyWalker(Klass* participant = NULL) {
1116     _name      = NULL;
1117     _signature = NULL;
1118     initialize(participant);
1119   }
1120 
1121   // This is common code for two searches:  One for concrete subtypes,
1122   // the other for concrete method implementations and overrides.
1123   bool doing_subtype_search() {
1124     return _name == NULL;
1125   }
1126 
1127   int num_participants() { return _num_participants; }
1128   Klass* participant(int n) {
1129     assert((uint)n <= (uint)_num_participants, "oob");
1130     return _participants[n];
1131   }
1132 
1133   // Note:  If n==num_participants, returns NULL.
1134   Method* found_method(int n) {
1135     assert((uint)n <= (uint)_num_participants, "oob");
1136     Method* fm = _found_methods[n];
1137     assert(n == _num_participants || fm != NULL, "proper usage");
1138     if (fm != NULL && fm->method_holder() != _participants[n]) {
1139       // Default methods from interfaces can be added to classes. In
1140       // that case the holder of the method is not the class but the
1141       // interface where it's defined.
1142       assert(fm->is_default_method(), "sanity");
1143       return NULL;
1144     }
1145     return fm;
1146   }
1147 
1148 #ifdef ASSERT
1149   // Assert that m is inherited into ctxk, without intervening overrides.
1150   // (May return true even if this is not true, in corner cases where we punt.)
1151   bool check_method_context(Klass* ctxk, Method* m) {
1152     if (m->method_holder() == ctxk)
1153       return true;  // Quick win.
1154     if (m->is_private())
1155       return false; // Quick lose.  Should not happen.
1156     if (!(m->is_public() || m->is_protected()))
1157       // The override story is complex when packages get involved.
1158       return true;  // Must punt the assertion to true.
1159     Method* lm = ctxk->lookup_method(m->name(), m->signature());
1160     if (lm == NULL && ctxk->is_instance_klass()) {
1161       // It might be an interface method
1162       lm = InstanceKlass::cast(ctxk)->lookup_method_in_ordered_interfaces(m->name(),
1163                                                                           m->signature());
1164     }
1165     if (lm == m)
1166       // Method m is inherited into ctxk.
1167       return true;
1168     if (lm != NULL) {
1169       if (!(lm->is_public() || lm->is_protected())) {
1170         // Method is [package-]private, so the override story is complex.
1171         return true;  // Must punt the assertion to true.
1172       }
1173       if (lm->is_static()) {
1174         // Static methods don't override non-static so punt
1175         return true;
1176       }
1177       if (!Dependencies::is_concrete_method(lm, ctxk) &&
1178           !Dependencies::is_concrete_method(m, ctxk)) {
1179         // They are both non-concrete
1180         if (lm->method_holder()->is_subtype_of(m->method_holder())) {
1181           // Method m is overridden by lm, but both are non-concrete.
1182           return true;
1183         }
1184         if (lm->method_holder()->is_interface() && m->method_holder()->is_interface() &&
1185             ctxk->is_subtype_of(m->method_holder()) && ctxk->is_subtype_of(lm->method_holder())) {
1186           // Interface method defined in multiple super interfaces
1187           return true;
1188         }
1189       }
1190     }
1191     ResourceMark rm;
1192     tty->print_cr("Dependency method not found in the associated context:");
1193     tty->print_cr("  context = %s", ctxk->external_name());
1194     tty->print(   "  method = "); m->print_short_name(tty); tty->cr();
1195     if (lm != NULL) {
1196       tty->print( "  found = "); lm->print_short_name(tty); tty->cr();
1197     }
1198     return false;
1199   }
1200 #endif
1201 
1202   void add_participant(Klass* participant) {
1203     assert(_num_participants + _record_witnesses < PARTICIPANT_LIMIT, "oob");
1204     int np = _num_participants++;
1205     _participants[np] = participant;
1206     _participants[np+1] = NULL;
1207     _found_methods[np+1] = NULL;
1208   }
1209 
1210   void record_witnesses(int add) {
1211     if (add > PARTICIPANT_LIMIT)  add = PARTICIPANT_LIMIT;
1212     assert(_num_participants + add < PARTICIPANT_LIMIT, "oob");
1213     _record_witnesses = add;
1214   }
1215 
1216   bool is_witness(Klass* k) {
1217     if (doing_subtype_search()) {
1218       return Dependencies::is_concrete_klass(k);
1219     } else if (!k->is_instance_klass()) {
1220       return false; // no methods to find in an array type
1221     } else {
1222       // Search class hierarchy first.
1223       Method* m = InstanceKlass::cast(k)->find_instance_method(_name, _signature);
1224       if (!Dependencies::is_concrete_method(m, k)) {
1225         // Check interface defaults also, if any exist.
1226         Array<Method*>* default_methods = InstanceKlass::cast(k)->default_methods();
1227         if (default_methods == NULL)
1228             return false;
1229         m = InstanceKlass::cast(k)->find_method(default_methods, _name, _signature);
1230         if (!Dependencies::is_concrete_method(m, NULL))
1231             return false;
1232       }
1233       _found_methods[_num_participants] = m;
1234       // Note:  If add_participant(k) is called,
1235       // the method m will already be memoized for it.
1236       return true;
1237     }
1238   }
1239 
1240   bool is_participant(Klass* k) {
1241     if (k == _participants[0]) {
1242       return true;
1243     } else if (_num_participants <= 1) {
1244       return false;
1245     } else {
1246       return in_list(k, &_participants[1]);
1247     }
1248   }
1249   bool ignore_witness(Klass* witness) {
1250     if (_record_witnesses == 0) {
1251       return false;
1252     } else {
1253       --_record_witnesses;
1254       add_participant(witness);
1255       return true;
1256     }
1257   }
1258   static bool in_list(Klass* x, Klass** list) {
1259     for (int i = 0; ; i++) {
1260       Klass* y = list[i];
1261       if (y == NULL)  break;
1262       if (y == x)  return true;
1263     }
1264     return false;  // not in list
1265   }
1266 
1267  private:
1268   // the actual search method:
1269   Klass* find_witness_anywhere(Klass* context_type,
1270                                  bool participants_hide_witnesses,
1271                                  bool top_level_call = true);
1272   // the spot-checking version:
1273   Klass* find_witness_in(KlassDepChange& changes,
1274                          Klass* context_type,
1275                            bool participants_hide_witnesses);
1276  public:
1277   Klass* find_witness_subtype(Klass* context_type, KlassDepChange* changes = NULL) {
1278     assert(doing_subtype_search(), "must set up a subtype search");
1279     // When looking for unexpected concrete types,
1280     // do not look beneath expected ones.
1281     const bool participants_hide_witnesses = true;
1282     // CX > CC > C' is OK, even if C' is new.
1283     // CX > { CC,  C' } is not OK if C' is new, and C' is the witness.
1284     if (changes != NULL) {
1285       return find_witness_in(*changes, context_type, participants_hide_witnesses);
1286     } else {
1287       return find_witness_anywhere(context_type, participants_hide_witnesses);
1288     }
1289   }
1290   Klass* find_witness_definer(Klass* context_type, KlassDepChange* changes = NULL) {
1291     assert(!doing_subtype_search(), "must set up a method definer search");
1292     // When looking for unexpected concrete methods,
1293     // look beneath expected ones, to see if there are overrides.
1294     const bool participants_hide_witnesses = true;
1295     // CX.m > CC.m > C'.m is not OK, if C'.m is new, and C' is the witness.
1296     if (changes != NULL) {
1297       return find_witness_in(*changes, context_type, !participants_hide_witnesses);
1298     } else {
1299       return find_witness_anywhere(context_type, !participants_hide_witnesses);
1300     }
1301   }
1302 };
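
     // Illustrative sketch of how the dependency checkers below use this walker
     // (exact call sites may differ): to re-check unique_concrete_method(ctxk, m),
     //     ClassHierarchyWalker wf(m->method_holder(), m);
     //     Klass* witness = wf.find_witness_definer(ctxk, changes);
     // A non-NULL witness is a newly visible concrete definer of m under ctxk and
     // therefore invalidates the dependency.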
1303 
1304 #ifndef PRODUCT
1305 static int deps_find_witness_calls = 0;
1306 static int deps_find_witness_steps = 0;
1307 static int deps_find_witness_recursions = 0;
1308 static int deps_find_witness_singles = 0;
1309 static int deps_find_witness_print = 0; // set to -1 to force a final print
1310 static bool count_find_witness_calls() {
1311   if (TraceDependencies || LogCompilation) {
1312     int pcount = deps_find_witness_print + 1;
1313     bool final_stats      = (pcount == 0);
1314     bool initial_call     = (pcount == 1);
1315     bool occasional_print = ((pcount & ((1<<10) - 1)) == 0);
1316     if (pcount < 0)  pcount = 1; // crude overflow protection
1317     deps_find_witness_print = pcount;
1318     if (VerifyDependencies && initial_call) {
1319       tty->print_cr("Warning:  TraceDependencies results may be inflated by VerifyDependencies");
1320     }
1321     if (occasional_print || final_stats) {
1322       // Every now and then dump a little info about dependency searching.
1323       if (xtty != NULL) {
1324        ttyLocker ttyl;
1325        xtty->elem("deps_find_witness calls='%d' steps='%d' recursions='%d' singles='%d'",
1326                    deps_find_witness_calls,
1327                    deps_find_witness_steps,
1328                    deps_find_witness_recursions,
1329                    deps_find_witness_singles);
1330       }
1331       if (final_stats || (TraceDependencies && WizardMode)) {
1332         ttyLocker ttyl;
1333         tty->print_cr("Dependency check (find_witness) "
1334                       "calls=%d, steps=%d (avg=%.1f), recursions=%d, singles=%d",
1335                       deps_find_witness_calls,
1336                       deps_find_witness_steps,
1337                       (double)deps_find_witness_steps / deps_find_witness_calls,
1338                       deps_find_witness_recursions,
1339                       deps_find_witness_singles);
1340       }
1341     }
1342     return true;
1343   }
1344   return false;
1345 }
1346 #else
1347 #define count_find_witness_calls() (0)
1348 #endif //PRODUCT
1349 
1350 
1351 Klass* ClassHierarchyWalker::find_witness_in(KlassDepChange& changes,
1352                                                Klass* context_type,
1353                                                bool participants_hide_witnesses) {
1354   assert(changes.involves_context(context_type), "irrelevant dependency");
1355   Klass* new_type = changes.new_type();
1356 
1357   (void)count_find_witness_calls();
1358   NOT_PRODUCT(deps_find_witness_singles++);
1359 
1360   // Current thread must be in VM (not native mode, as in CI):
1361   assert(must_be_in_vm(), "raw oops here");
1362   // Must not move the class hierarchy during this check:
1363   assert_locked_or_safepoint(Compile_lock);
1364 
1365   int nof_impls = InstanceKlass::cast(context_type)->nof_implementors();
1366   if (nof_impls > 1) {
1367     // Avoid this case: *I.m > { A.m, C }; B.m > C
1368     // %%% Until this is fixed more systematically, bail out.
1369     // See corresponding comment in find_witness_anywhere.
1370     return context_type;
1371   }
1372 
1373   assert(!is_participant(new_type), "only old classes are participants");
1374   if (participants_hide_witnesses) {
1375     // If the new type is a subtype of a participant, we are done.
1376     for (int i = 0; i < num_participants(); i++) {
1377       Klass* part = participant(i);
1378       if (part == NULL)  continue;
1379       assert(changes.involves_context(part) == new_type->is_subtype_of(part),
1380              "correct marking of participants, b/c new_type is unique");
1381       if (changes.involves_context(part)) {
1382         // new guy is protected from this check by previous participant
1383         return NULL;
1384       }
1385     }
1386   }
1387 
1388   if (is_witness(new_type) &&
1389       !ignore_witness(new_type)) {
1390     return new_type;
1391   }
1392 
1393   return NULL;
1394 }
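
     // find_witness_in() is the incremental form: when a single new type is being
     // added to the hierarchy (KlassDepChange), only that type needs to be tested,
     // because everything older was already checked when the nmethod was installed.
     // find_witness_anywhere() below walks the whole sub-hierarchy and is used when
     // no specific change is supplied.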
1395 
1396 
1397 // Walk hierarchy under a context type, looking for unexpected types.
1398 // Do not report participant types, and recursively walk beneath
1399 // them only if participants_hide_witnesses is false.
1400 // If top_level_call is false, skip testing the context type,
1401 // because the caller has already considered it.
1402 Klass* ClassHierarchyWalker::find_witness_anywhere(Klass* context_type,
1403                                                      bool participants_hide_witnesses,
1404                                                      bool top_level_call) {
1405   // Current thread must be in VM (not native mode, as in CI):
1406   assert(must_be_in_vm(), "raw oops here");
1407   // Must not move the class hierarchy during this check:
1408   assert_locked_or_safepoint(Compile_lock);
1409 
1410   bool do_counts = count_find_witness_calls();
1411 
1412   // Check the root of the sub-hierarchy first.
1413   if (top_level_call) {
1414     if (do_counts) {
1415       NOT_PRODUCT(deps_find_witness_calls++);
1416       NOT_PRODUCT(deps_find_witness_steps++);
1417     }
1418     if (is_participant(context_type)) {
1419       if (participants_hide_witnesses)  return NULL;
1420       // else fall through to search loop...
1421     } else if (is_witness(context_type) && !ignore_witness(context_type)) {
1422       // The context is an abstract class or interface, to start with.
1423       return context_type;
1424     }
1425   }
1426 
1427   // Now we must check each implementor and each subclass.
1428   // Use a short worklist to avoid blowing the stack.
1429   // Each worklist entry is a *chain* of subklass siblings to process.
1430   const int CHAINMAX = 100;  // >= 1 + InstanceKlass::implementors_limit
1431   Klass* chains[CHAINMAX];
1432   int    chaini = 0;  // index into worklist
1433   Klass* chain;       // scratch variable
1434 #define ADD_SUBCLASS_CHAIN(k)                     {  \
1435     assert(chaini < CHAINMAX, "oob");                \
1436     chain = k->subklass();                           \
1437     if (chain != NULL)  chains[chaini++] = chain;    }
1438 
1439   // Look for non-abstract subclasses.
1440   // (Note:  Interfaces do not have subclasses.)
1441   ADD_SUBCLASS_CHAIN(context_type);
1442 
1443   // If it is an interface, search its direct implementors.
1444   // (Their subclasses are additional indirect implementors.
1445   // See InstanceKlass::add_implementor.)
1446   // (Note:  nof_implementors is always zero for non-interfaces.)
1447   if (top_level_call) {
1448     int nof_impls = InstanceKlass::cast(context_type)->nof_implementors();
1449     if (nof_impls > 1) {
1450       // Avoid this case: *I.m > { A.m, C }; B.m > C
1451       // Here, I.m has 2 concrete implementations, but m appears unique
1452       // as A.m, because the search misses B.m when checking C.
1453       // The inherited method B.m was getting missed by the walker
1454       // when interface 'I' was the starting point.
1455       // %%% Until this is fixed more systematically, bail out.
1456       // (Old CHA had the same limitation.)
1457       return context_type;
1458     }
1459     if (nof_impls > 0) {
1460       Klass* impl = InstanceKlass::cast(context_type)->implementor();
1461       assert(impl != NULL, "just checking");
      // If impl is the same as the context_type, then more than one
      // implementor has been seen.  No exact info in this case.
1464       if (impl == context_type) {
1465         return context_type;  // report an inexact witness to this sad affair
1466       }
      if (do_counts) { NOT_PRODUCT(deps_find_witness_steps++); }
1469       if (is_participant(impl)) {
1470         if (!participants_hide_witnesses) {
1471           ADD_SUBCLASS_CHAIN(impl);
1472         }
1473       } else if (is_witness(impl) && !ignore_witness(impl)) {
1474         return impl;
1475       } else {
1476         ADD_SUBCLASS_CHAIN(impl);
1477       }
1478     }
1479   }
1480 
1481   // Recursively process each non-trivial sibling chain.
1482   while (chaini > 0) {
1483     Klass* chain = chains[--chaini];
1484     for (Klass* sub = chain; sub != NULL; sub = sub->next_sibling()) {
1485       if (do_counts) { NOT_PRODUCT(deps_find_witness_steps++); }
1486       if (is_participant(sub)) {
1487         if (participants_hide_witnesses)  continue;
1488         // else fall through to process this guy's subclasses
1489       } else if (is_witness(sub) && !ignore_witness(sub)) {
1490         return sub;
1491       }
1492       if (chaini < (VerifyDependencies? 2: CHAINMAX)) {
1493         // Fast path.  (Partially disabled if VerifyDependencies.)
1494         ADD_SUBCLASS_CHAIN(sub);
1495       } else {
1496         // Worklist overflow.  Do a recursive call.  Should be rare.
1497         // The recursive call will have its own worklist, of course.
1498         // (Note that sub has already been tested, so that there is
1499         // no need for the recursive call to re-test.  That's handy,
1500         // since the recursive call sees sub as the context_type.)
1501         if (do_counts) { NOT_PRODUCT(deps_find_witness_recursions++); }
1502         Klass* witness = find_witness_anywhere(sub,
1503                                                  participants_hide_witnesses,
1504                                                  /*top_level_call=*/ false);
1505         if (witness != NULL)  return witness;
1506       }
1507     }
1508   }
1509 
1510   // No witness found.  The dependency remains unbroken.
1511   return NULL;
1512 #undef ADD_SUBCLASS_CHAIN
1513 }
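
// A worked example (hypothetical hierarchy) of the walk above, assuming a
// subtype search in which any concrete non-participant counts as a witness:
// let the context type be an abstract class A with concrete subclasses B and
// C, where B is the sole participant and participants_hide_witnesses is true.
// The walk pushes the chain {B, C}, skips B (and, because witnesses are
// hidden, does not descend below it), then finds that C is a concrete
// non-participant and returns C as the witness.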
1514 
1515 
1516 bool Dependencies::is_concrete_klass(Klass* k) {
1517   if (k->is_abstract())  return false;
1518   // %%% We could treat classes which are concrete but
1519   // have not yet been instantiated as virtually abstract.
1520   // This would require a deoptimization barrier on first instantiation.
1521   //if (k->is_not_instantiated())  return false;
1522   return true;
1523 }
1524 
bool Dependencies::is_concrete_method(Method* m, Klass* k) {
  // NULL is not a concrete method;
  // statics are irrelevant to virtual call sites;
  // abstract methods are not concrete;
  // overpass (error) methods are not concrete if k is abstract.
  //
  // Note that "true" is the conservative answer: the overpass clause is
  // false when k == NULL, so we return true whenever the answer would
  // otherwise depend on that clause alone.
  return !(m == NULL || m->is_static() || m->is_abstract() ||
           (m->is_overpass() && k != NULL && k->is_abstract()));
1536 }
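
// Illustrative cases (hypothetical): an abstract method is never concrete; a
// static method is irrelevant to a virtual call site and so is treated as
// non-concrete here; an overpass method is non-concrete only when its holder
// k is known and abstract, while the same overpass in a concrete holder is
// conservatively treated as concrete.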
1537 
1538 
1539 Klass* Dependencies::find_finalizable_subclass(Klass* k) {
1540   if (k->is_interface())  return NULL;
1541   if (k->has_finalizer()) return k;
1542   k = k->subklass();
1543   while (k != NULL) {
1544     Klass* result = find_finalizable_subclass(k);
1545     if (result != NULL) return result;
1546     k = k->next_sibling();
1547   }
1548   return NULL;
1549 }
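
// Note: this is a depth-first walk over the subklass()/next_sibling() links,
// returning the first type found (possibly k itself) that has a finalizer.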
1550 
1551 
1552 bool Dependencies::is_concrete_klass(ciInstanceKlass* k) {
1553   if (k->is_abstract())  return false;
1554   // We could also return false if k does not yet appear to be
1555   // instantiated, if the VM version supports this distinction also.
1556   //if (k->is_not_instantiated())  return false;
1557   return true;
1558 }
1559 
1560 bool Dependencies::has_finalizable_subclass(ciInstanceKlass* k) {
1561   return k->has_finalizable_subclass();
1562 }
1563 
1564 
1565 // Any use of the contents (bytecodes) of a method must be
1566 // marked by an "evol_method" dependency, if those contents
1567 // can change.  (Note: A method is always dependent on itself.)
1568 Klass* Dependencies::check_evol_method(Method* m) {
1569   assert(must_be_in_vm(), "raw oops here");
1570   // Did somebody do a JVMTI RedefineClasses while our backs were turned?
  // Or is there now a breakpoint?
1572   // (Assumes compiled code cannot handle bkpts; change if UseFastBreakpoints.)
1573   if (m->is_old()
1574       || m->number_of_breakpoints() > 0) {
1575     return m->method_holder();
1576   } else {
1577     return NULL;
1578   }
1579 }
1580 
1581 // This is a strong assertion:  It is that the given type
1582 // has no subtypes whatever.  It is most useful for
1583 // optimizing checks on reflected types or on array types.
1584 // (Checks on types which are derived from real instances
1585 // can be optimized more strongly than this, because we
1586 // know that the checked type comes from a concrete type,
1587 // and therefore we can disregard abstract types.)
1588 Klass* Dependencies::check_leaf_type(Klass* ctxk) {
1589   assert(must_be_in_vm(), "raw oops here");
1590   assert_locked_or_safepoint(Compile_lock);
1591   InstanceKlass* ctx = InstanceKlass::cast(ctxk);
1592   Klass* sub = ctx->subklass();
1593   if (sub != NULL) {
1594     return sub;
  } else if (ctx->nof_implementors() != 0) {
    // For ctxk to be a leaf, an interface must be unimplemented;
    // a non-zero implementor count means some class implements it.
    // (For non-interfaces, nof_implementors is always zero.)
1598     Klass* impl = ctx->implementor();
1599     assert(impl != NULL, "must be set");
1600     return impl;
1601   } else {
1602     return NULL;
1603   }
1604 }
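
// A minimal usage sketch (hypothetical caller), assuming ctxk is an
// InstanceKlass that compiled code wants to treat as exactly typed:
//
//   Klass* witness = Dependencies::check_leaf_type(ctxk);
//   if (witness == NULL) {
//     // ctxk currently has no subtypes or implementors; the leaf_type
//     // assertion holds at this moment.
//   } else {
//     // witness is a subtype or implementor that breaks the assertion.
//   }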
1605 
1606 // Test the assertion that conck is the only concrete subtype* of ctxk.
// The type conck itself is allowed to have further concrete subtypes.
1608 // This allows the compiler to narrow occurrences of ctxk by conck,
1609 // when dealing with the types of actual instances.
1610 Klass* Dependencies::check_abstract_with_unique_concrete_subtype(Klass* ctxk,
1611                                                                    Klass* conck,
1612                                                                    KlassDepChange* changes) {
1613   ClassHierarchyWalker wf(conck);
1614   return wf.find_witness_subtype(ctxk, changes);
1615 }
1616 
1617 // If a non-concrete class has no concrete subtypes, it is not (yet)
1618 // instantiatable.  This can allow the compiler to make some paths go
1619 // dead, if they are gated by a test of the type.
1620 Klass* Dependencies::check_abstract_with_no_concrete_subtype(Klass* ctxk,
1621                                                                KlassDepChange* changes) {
1622   // Find any concrete subtype, with no participants:
1623   ClassHierarchyWalker wf;
1624   return wf.find_witness_subtype(ctxk, changes);
1625 }
1626 
1627 
1628 // If a concrete class has no concrete subtypes, it can always be
1629 // exactly typed.  This allows the use of a cheaper type test.
1630 Klass* Dependencies::check_concrete_with_no_concrete_subtype(Klass* ctxk,
1631                                                                KlassDepChange* changes) {
1632   // Find any concrete subtype, with only the ctxk as participant:
1633   ClassHierarchyWalker wf(ctxk);
1634   return wf.find_witness_subtype(ctxk, changes);
1635 }
1636 
1637 
1638 // Find the unique concrete proper subtype of ctxk, or NULL if there
1639 // is more than one concrete proper subtype.  If there are no concrete
1640 // proper subtypes, return ctxk itself, whether it is concrete or not.
// The returned subtype is allowed to have further concrete subtypes.
1642 // That is, return CC1 for CX > CC1 > CC2, but NULL for CX > { CC1, CC2 }.
1643 Klass* Dependencies::find_unique_concrete_subtype(Klass* ctxk) {
1644   ClassHierarchyWalker wf(ctxk);   // Ignore ctxk when walking.
1645   wf.record_witnesses(1);          // Record one other witness when walking.
1646   Klass* wit = wf.find_witness_subtype(ctxk);
1647   if (wit != NULL)  return NULL;   // Too many witnesses.
1648   Klass* conck = wf.participant(0);
1649   if (conck == NULL) {
1650 #ifndef PRODUCT
1651     // Make sure the dependency mechanism will pass this discovery:
1652     if (VerifyDependencies) {
1653       // Turn off dependency tracing while actually testing deps.
1654       FlagSetting fs(TraceDependencies, false);
1655       if (!Dependencies::is_concrete_klass(ctxk)) {
1656         guarantee(NULL ==
1657                   (void *)check_abstract_with_no_concrete_subtype(ctxk),
1658                   "verify dep.");
1659       } else {
1660         guarantee(NULL ==
1661                   (void *)check_concrete_with_no_concrete_subtype(ctxk),
1662                   "verify dep.");
1663       }
1664     }
1665 #endif //PRODUCT
1666     return ctxk;                   // Return ctxk as a flag for "no subtypes".
1667   } else {
1668 #ifndef PRODUCT
1669     // Make sure the dependency mechanism will pass this discovery:
1670     if (VerifyDependencies) {
1671       // Turn off dependency tracing while actually testing deps.
1672       FlagSetting fs(TraceDependencies, false);
1673       if (!Dependencies::is_concrete_klass(ctxk)) {
1674         guarantee(NULL == (void *)
1675                   check_abstract_with_unique_concrete_subtype(ctxk, conck),
1676                   "verify dep.");
1677       }
1678     }
1679 #endif //PRODUCT
1680     return conck;
1681   }
1682 }
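
// A caller sketch (hypothetical), following the contract described above:
//
//   Klass* conck = Dependencies::find_unique_concrete_subtype(ctxk);
//   if (conck == NULL) {
//     // two or more concrete proper subtypes; no narrowing is possible
//   } else if (conck == ctxk) {
//     // no concrete proper subtypes at all
//   } else {
//     // conck is the unique concrete proper subtype; the compiler may
//     // narrow ctxk to conck and record the corresponding dependency
//   }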
1683 
1684 // Test the assertion that the k[12] are the only concrete subtypes of ctxk,
1685 // except possibly for further subtypes of k[12] themselves.
1686 // The context type must be abstract.  The types k1 and k2 are themselves
1687 // allowed to have further concrete subtypes.
1688 Klass* Dependencies::check_abstract_with_exclusive_concrete_subtypes(
1689                                                 Klass* ctxk,
1690                                                 Klass* k1,
1691                                                 Klass* k2,
1692                                                 KlassDepChange* changes) {
1693   ClassHierarchyWalker wf;
1694   wf.add_participant(k1);
1695   wf.add_participant(k2);
1696   return wf.find_witness_subtype(ctxk, changes);
1697 }
1698 
1699 // Search ctxk for concrete implementations.  If there are klen or fewer,
1700 // pack them into the given array and return the number.
1701 // Otherwise, return -1, meaning the given array would overflow.
1702 // (Note that a return of 0 means there are exactly no concrete subtypes.)
1703 // In this search, if ctxk is concrete, it will be reported alone.
1704 // For any type CC reported, no proper subtypes of CC will be reported.
1705 int Dependencies::find_exclusive_concrete_subtypes(Klass* ctxk,
1706                                                    int klen,
1707                                                    Klass* karray[]) {
1708   ClassHierarchyWalker wf;
1709   wf.record_witnesses(klen);
1710   Klass* wit = wf.find_witness_subtype(ctxk);
1711   if (wit != NULL)  return -1;  // Too many witnesses.
1712   int num = wf.num_participants();
1713   assert(num <= klen, "oob");
1714   // Pack the result array with the good news.
1715   for (int i = 0; i < num; i++)
1716     karray[i] = wf.participant(i);
1717 #ifndef PRODUCT
1718   // Make sure the dependency mechanism will pass this discovery:
1719   if (VerifyDependencies) {
1720     // Turn off dependency tracing while actually testing deps.
1721     FlagSetting fs(TraceDependencies, false);
1722     switch (Dependencies::is_concrete_klass(ctxk)? -1: num) {
1723     case -1: // ctxk was itself concrete
1724       guarantee(num == 1 && karray[0] == ctxk, "verify dep.");
1725       break;
1726     case 0:
1727       guarantee(NULL == (void *)check_abstract_with_no_concrete_subtype(ctxk),
1728                 "verify dep.");
1729       break;
1730     case 1:
1731       guarantee(NULL == (void *)
1732                 check_abstract_with_unique_concrete_subtype(ctxk, karray[0]),
1733                 "verify dep.");
1734       break;
1735     case 2:
1736       guarantee(NULL == (void *)
1737                 check_abstract_with_exclusive_concrete_subtypes(ctxk,
1738                                                                 karray[0],
1739                                                                 karray[1]),
1740                 "verify dep.");
1741       break;
1742     default:
      ShouldNotReachHere();  // klen > 2 not yet supported
1744     }
1745   }
1746 #endif //PRODUCT
1747   return num;
1748 }
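
// Usage sketch (hypothetical), assuming the caller supplies a small array:
//
//   Klass* karray[2];
//   int num = Dependencies::find_exclusive_concrete_subtypes(ctxk, 2, karray);
//   if (num < 0) {
//     // more than two concrete subtypes exist; no useful assertion
//   } else {
//     // karray[0..num-1] are the only concrete subtypes of ctxk
//   }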
1749 
1750 // If a class (or interface) has a unique concrete method uniqm, return NULL.
1751 // Otherwise, return a class that contains an interfering method.
1752 Klass* Dependencies::check_unique_concrete_method(Klass* ctxk, Method* uniqm,
1753                                                     KlassDepChange* changes) {
1754   // Here is a missing optimization:  If uniqm->is_final(),
1755   // we don't really need to search beneath it for overrides.
1756   // This is probably not important, since we don't use dependencies
1757   // to track final methods.  (They can't be "definalized".)
1758   ClassHierarchyWalker wf(uniqm->method_holder(), uniqm);
1759   return wf.find_witness_definer(ctxk, changes);
1760 }
1761 
1762 // Find the set of all non-abstract methods under ctxk that match m.
1763 // (The method m must be defined or inherited in ctxk.)
1764 // Include m itself in the set, unless it is abstract.
1765 // If this set has exactly one element, return that element.
1766 Method* Dependencies::find_unique_concrete_method(Klass* ctxk, Method* m) {
1767   // Return NULL if m is marked old; must have been a redefined method.
1768   if (m->is_old()) {
1769     return NULL;
1770   }
1771   ClassHierarchyWalker wf(m);
1772   assert(wf.check_method_context(ctxk, m), "proper context");
1773   wf.record_witnesses(1);
1774   Klass* wit = wf.find_witness_definer(ctxk);
1775   if (wit != NULL)  return NULL;  // Too many witnesses.
1776   Method* fm = wf.found_method(0);  // Will be NULL if num_parts == 0.
1777   if (Dependencies::is_concrete_method(m, ctxk)) {
1778     if (fm == NULL) {
1779       // It turns out that m was always the only implementation.
1780       fm = m;
1781     } else if (fm != m) {
1782       // Two conflicting implementations after all.
1783       // (This can happen if m is inherited into ctxk and fm overrides it.)
1784       return NULL;
1785     }
1786   }
1787 #ifndef PRODUCT
1788   // Make sure the dependency mechanism will pass this discovery:
1789   if (VerifyDependencies && fm != NULL) {
1790     guarantee(NULL == (void *)check_unique_concrete_method(ctxk, fm),
1791               "verify dep.");
1792   }
1793 #endif //PRODUCT
1794   return fm;
1795 }
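
// Usage sketch (hypothetical devirtualization), assuming m is defined or
// inherited in ctxk:
//
//   Method* target = Dependencies::find_unique_concrete_method(ctxk, m);
//   if (target != NULL) {
//     // m has exactly one concrete implementation under ctxk; a call can
//     // be bound to target if a unique_concrete_method dependency is
//     // recorded against ctxk.
//   }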
1796 
1797 Klass* Dependencies::check_exclusive_concrete_methods(Klass* ctxk,
1798                                                         Method* m1,
1799                                                         Method* m2,
1800                                                         KlassDepChange* changes) {
1801   ClassHierarchyWalker wf(m1);
1802   wf.add_participant(m1->method_holder());
1803   wf.add_participant(m2->method_holder());
1804   return wf.find_witness_definer(ctxk, changes);
1805 }
1806 
1807 Klass* Dependencies::check_has_no_finalizable_subclasses(Klass* ctxk, KlassDepChange* changes) {
1808   Klass* search_at = ctxk;
1809   if (changes != NULL)
1810     search_at = changes->new_type(); // just look at the new bit
1811   return find_finalizable_subclass(search_at);
1812 }
1813 
1814 Klass* Dependencies::check_call_site_target_value(oop call_site, oop method_handle, CallSiteDepChange* changes) {
1815   assert(call_site != NULL, "sanity");
1816   assert(method_handle != NULL, "sanity");
  assert(call_site->is_a(SystemDictionary::CallSite_klass()),         "sanity");
  assert(method_handle->is_a(SystemDictionary::MethodHandle_klass()), "sanity");
1818 
1819   if (changes == NULL) {
1820     // Validate all CallSites
1821     if (java_lang_invoke_CallSite::target(call_site) != method_handle)
1822       return call_site->klass();  // assertion failed
1823   } else {
1824     // Validate the given CallSite
1825     if (call_site == changes->call_site() && java_lang_invoke_CallSite::target(call_site) != changes->method_handle()) {
1826       assert(method_handle != changes->method_handle(), "must be");
1827       return call_site->klass();  // assertion failed
1828     }
1829   }
1830   return NULL;  // assertion still valid
1831 }
1832 
1833 void Dependencies::DepStream::trace_and_log_witness(Klass* witness) {
1834   if (witness != NULL) {
1835     if (TraceDependencies) {
1836       print_dependency(witness, /*verbose=*/ true);
1837     }
1838     // The following is a no-op unless logging is enabled:
1839     log_dependency(witness);
1840   }
1841 }
1842 
1843 
1844 Klass* Dependencies::DepStream::check_klass_dependency(KlassDepChange* changes) {
1845   assert_locked_or_safepoint(Compile_lock);
1846   Dependencies::check_valid_dependency_type(type());
1847 
1848   Klass* witness = NULL;
1849   switch (type()) {
1850   case evol_method:
1851     witness = check_evol_method(method_argument(0));
1852     break;
1853   case leaf_type:
1854     witness = check_leaf_type(context_type());
1855     break;
1856   case abstract_with_unique_concrete_subtype:
1857     witness = check_abstract_with_unique_concrete_subtype(context_type(), type_argument(1), changes);
1858     break;
1859   case abstract_with_no_concrete_subtype:
1860     witness = check_abstract_with_no_concrete_subtype(context_type(), changes);
1861     break;
1862   case concrete_with_no_concrete_subtype:
1863     witness = check_concrete_with_no_concrete_subtype(context_type(), changes);
1864     break;
1865   case unique_concrete_method:
1866     witness = check_unique_concrete_method(context_type(), method_argument(1), changes);
1867     break;
1868   case abstract_with_exclusive_concrete_subtypes_2:
1869     witness = check_abstract_with_exclusive_concrete_subtypes(context_type(), type_argument(1), type_argument(2), changes);
1870     break;
1871   case exclusive_concrete_methods_2:
1872     witness = check_exclusive_concrete_methods(context_type(), method_argument(1), method_argument(2), changes);
1873     break;
1874   case no_finalizable_subclasses:
1875     witness = check_has_no_finalizable_subclasses(context_type(), changes);
1876     break;
1877   default:
1878     witness = NULL;
1879     break;
1880   }
1881   trace_and_log_witness(witness);
1882   return witness;
1883 }
1884 
1885 
1886 Klass* Dependencies::DepStream::check_call_site_dependency(CallSiteDepChange* changes) {
1887   assert_locked_or_safepoint(Compile_lock);
1888   Dependencies::check_valid_dependency_type(type());
1889 
1890   Klass* witness = NULL;
1891   switch (type()) {
1892   case call_site_target_value:
1893     witness = check_call_site_target_value(argument_oop(0), argument_oop(1), changes);
1894     break;
1895   default:
1896     witness = NULL;
1897     break;
1898   }
1899   trace_and_log_witness(witness);
1900   return witness;
1901 }
1902 
1903 
1904 Klass* Dependencies::DepStream::spot_check_dependency_at(DepChange& changes) {
1905   // Handle klass dependency
1906   if (changes.is_klass_change() && changes.as_klass_change()->involves_context(context_type()))
1907     return check_klass_dependency(changes.as_klass_change());
1908 
1909   // Handle CallSite dependency
1910   if (changes.is_call_site_change())
1911     return check_call_site_dependency(changes.as_call_site_change());
1912 
1913   // irrelevant dependency; skip it
1914   return NULL;
1915 }
1916 
1917 
1918 void DepChange::print() {
1919   int nsup = 0, nint = 0;
1920   for (ContextStream str(*this); str.next(); ) {
1921     Klass* k = str.klass();
1922     switch (str.change_type()) {
1923     case Change_new_type:
1924       tty->print_cr("  dependee = %s", k->external_name());
1925       break;
1926     case Change_new_sub:
1927       if (!WizardMode) {
1928         ++nsup;
1929       } else {
1930         tty->print_cr("  context super = %s", k->external_name());
1931       }
1932       break;
1933     case Change_new_impl:
1934       if (!WizardMode) {
1935         ++nint;
1936       } else {
1937         tty->print_cr("  context interface = %s", k->external_name());
1938       }
1939       break;
1940     default:
1941       break;
1942     }
1943   }
1944   if (nsup + nint != 0) {
1945     tty->print_cr("  context supers = %d, interfaces = %d", nsup, nint);
1946   }
1947 }
1948 
1949 void DepChange::ContextStream::start() {
1950   Klass* new_type = _changes.is_klass_change() ? _changes.as_klass_change()->new_type() : (Klass*) NULL;
1951   _change_type = (new_type == NULL ? NO_CHANGE : Start_Klass);
1952   _klass = new_type;
1953   _ti_base = NULL;
1954   _ti_index = 0;
1955   _ti_limit = 0;
1956 }
1957 
1958 bool DepChange::ContextStream::next() {
1959   switch (_change_type) {
1960   case Start_Klass:             // initial state; _klass is the new type
1961     _ti_base = InstanceKlass::cast(_klass)->transitive_interfaces();
1962     _ti_index = 0;
1963     _change_type = Change_new_type;
1964     return true;
1965   case Change_new_type:
1966     // fall through:
1967     _change_type = Change_new_sub;
1968   case Change_new_sub:
1969     // 6598190: brackets workaround Sun Studio C++ compiler bug 6629277
1970     {
1971       _klass = _klass->super();
1972       if (_klass != NULL) {
1973         return true;
1974       }
1975     }
1976     // else set up _ti_limit and fall through:
1977     _ti_limit = (_ti_base == NULL) ? 0 : _ti_base->length();
1978     _change_type = Change_new_impl;
1979   case Change_new_impl:
1980     if (_ti_index < _ti_limit) {
1981       _klass = _ti_base->at(_ti_index++);
1982       return true;
1983     }
1984     // fall through:
1985     _change_type = NO_CHANGE;  // iterator is exhausted
1986   case NO_CHANGE:
1987     break;
1988   default:
1989     ShouldNotReachHere();
1990   }
1991   return false;
1992 }
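
// The stream above yields, in order: the new type itself, then each of its
// superclasses from the bottom up, then its transitive interfaces.
// A minimal iteration sketch (mirroring the loops elsewhere in this file):
//
//   for (DepChange::ContextStream str(changes); str.next(); ) {
//     Klass* k = str.klass();  // one potential context type per step
//   }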
1993 
1994 void KlassDepChange::initialize() {
1995   // entire transaction must be under this lock:
1996   assert_lock_strong(Compile_lock);
1997 
  // Mark the dependee, all of its superclasses, and its
  // transitive interfaces.
2000   for (ContextStream str(*this); str.next(); ) {
2001     Klass* d = str.klass();
2002     assert(!InstanceKlass::cast(d)->is_marked_dependent(), "checking");
2003     InstanceKlass::cast(d)->set_is_marked_dependent(true);
2004   }
2005 }
2006 
2007 KlassDepChange::~KlassDepChange() {
  // Unmark the dependee, all of its superclasses, and its
  // transitive interfaces.
2010   for (ContextStream str(*this); str.next(); ) {
2011     Klass* d = str.klass();
2012     InstanceKlass::cast(d)->set_is_marked_dependent(false);
2013   }
2014 }
2015 
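// The mark consulted below is set on the new type, its superclasses, and its
// transitive interfaces by KlassDepChange::initialize() above, and cleared by
// the destructor, so involves_context() is a constant-time membership test.
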
2016 bool KlassDepChange::involves_context(Klass* k) {
2017   if (k == NULL || !k->is_instance_klass()) {
2018     return false;
2019   }
2020   InstanceKlass* ik = InstanceKlass::cast(k);
2021   bool is_contained = ik->is_marked_dependent();
2022   assert(is_contained == new_type()->is_subtype_of(k),
2023          "correct marking of potential context types");
2024   return is_contained;
2025 }
2026 
2027 #ifndef PRODUCT
2028 void Dependencies::print_statistics() {
2029   if (deps_find_witness_print != 0) {
2030     // Call one final time, to flush out the data.
2031     deps_find_witness_print = -1;
2032     count_find_witness_calls();
2033   }
2034 }
2035 #endif
2036 
2037 CallSiteDepChange::CallSiteDepChange(Handle call_site, Handle method_handle) :
2038   _call_site(call_site),
2039   _method_handle(method_handle) {
2040   assert(_call_site()->is_a(SystemDictionary::CallSite_klass()), "must be");
2041   assert(_method_handle.is_null() || _method_handle()->is_a(SystemDictionary::MethodHandle_klass()), "must be");
2042 }