1 /*
   2  * Copyright (c) 2005, 2017, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "ci/ciArrayKlass.hpp"
  27 #include "ci/ciEnv.hpp"
  28 #include "ci/ciKlass.hpp"
  29 #include "ci/ciMethod.hpp"
  30 #include "classfile/javaClasses.inline.hpp"
  31 #include "code/dependencies.hpp"
  32 #include "compiler/compileLog.hpp"
  33 #include "memory/resourceArea.hpp"
  34 #include "oops/oop.inline.hpp"
  35 #include "oops/objArrayKlass.hpp"
  36 #include "runtime/handles.hpp"
  37 #include "runtime/handles.inline.hpp"
  38 #include "runtime/thread.inline.hpp"
  39 #include "utilities/copy.hpp"
  40 
  41 
  42 #ifdef ASSERT
  43 static bool must_be_in_vm() {
  44   Thread* thread = Thread::current();
  45   if (thread->is_Java_thread())
  46     return ((JavaThread*)thread)->thread_state() == _thread_in_vm;
  47   else
    return true;  // something like this: thread->is_VM_thread();
  49 }
  50 #endif //ASSERT
  51 
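// Set up a fresh dependency recorder for one compilation: one growable
// bucket per DepType, allocated in the ciEnv's arena.  The flattened
// encoding (_content_bytes/_size_in_bytes) is produced later by
// encode_content_bytes().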
  52 void Dependencies::initialize(ciEnv* env) {
  53   Arena* arena = env->arena();
  54   _oop_recorder = env->oop_recorder();
  55   _log = env->log();
  56   _dep_seen = new(arena) GrowableArray<int>(arena, 500, 0, 0);
  57 #if INCLUDE_JVMCI
  58   _using_dep_values = false;
  59 #endif
  60   DEBUG_ONLY(_deps[end_marker] = NULL);
  61   for (int i = (int)FIRST_TYPE; i < (int)TYPE_LIMIT; i++) {
  62     _deps[i] = new(arena) GrowableArray<ciBaseObject*>(arena, 10, 0, 0);
  63   }
  64   _content_bytes = NULL;
  65   _size_in_bytes = (size_t)-1;
  66 
  67   assert(TYPE_LIMIT <= (1<<LG2_TYPE_LIMIT), "sanity");
  68 }
  69 
  70 void Dependencies::assert_evol_method(ciMethod* m) {
  71   assert_common_1(evol_method, m);
  72 }
  73 
  74 void Dependencies::assert_leaf_type(ciKlass* ctxk) {
  75   if (ctxk->is_array_klass()) {
  76     // As a special case, support this assertion on an array type,
  77     // which reduces to an assertion on its element type.
  78     // Note that this cannot be done with assertions that
  79     // relate to concreteness or abstractness.
  80     ciType* elemt = ctxk->as_array_klass()->base_element_type();
  81     if (!elemt->is_instance_klass())  return;   // Ex:  int[][]
  82     ctxk = elemt->as_instance_klass();
  83     //if (ctxk->is_final())  return;            // Ex:  String[][]
  84   }
  85   check_ctxk(ctxk);
  86   assert_common_1(leaf_type, ctxk);
  87 }
  88 
  89 void Dependencies::assert_abstract_with_unique_concrete_subtype(ciKlass* ctxk, ciKlass* conck) {
  90   check_ctxk_abstract(ctxk);
  91   assert_common_2(abstract_with_unique_concrete_subtype, ctxk, conck);
  92 }
  93 
  94 void Dependencies::assert_abstract_with_no_concrete_subtype(ciKlass* ctxk) {
  95   check_ctxk_abstract(ctxk);
  96   assert_common_1(abstract_with_no_concrete_subtype, ctxk);
  97 }
  98 
  99 void Dependencies::assert_concrete_with_no_concrete_subtype(ciKlass* ctxk) {
 100   check_ctxk_concrete(ctxk);
 101   assert_common_1(concrete_with_no_concrete_subtype, ctxk);
 102 }
 103 
 104 void Dependencies::assert_unique_concrete_method(ciKlass* ctxk, ciMethod* uniqm) {
 105   check_ctxk(ctxk);
 106   assert_common_2(unique_concrete_method, ctxk, uniqm);
 107 }
 108 
 109 void Dependencies::assert_abstract_with_exclusive_concrete_subtypes(ciKlass* ctxk, ciKlass* k1, ciKlass* k2) {
 110   check_ctxk(ctxk);
 111   assert_common_3(abstract_with_exclusive_concrete_subtypes_2, ctxk, k1, k2);
 112 }
 113 
 114 void Dependencies::assert_exclusive_concrete_methods(ciKlass* ctxk, ciMethod* m1, ciMethod* m2) {
 115   check_ctxk(ctxk);
 116   assert_common_3(exclusive_concrete_methods_2, ctxk, m1, m2);
 117 }
 118 
 119 void Dependencies::assert_has_no_finalizable_subclasses(ciKlass* ctxk) {
 120   check_ctxk(ctxk);
 121   assert_common_1(no_finalizable_subclasses, ctxk);
 122 }
 123 
 124 void Dependencies::assert_call_site_target_value(ciCallSite* call_site, ciMethodHandle* method_handle) {
 125   assert_common_2(call_site_target_value, call_site, method_handle);
 126 }
 127 
 128 #if INCLUDE_JVMCI
 129 
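// JVMCI records dependency arguments as DepValues (OopRecorder indexes)
// instead of ci objects, since JVMCI compilers do not go through the ci
// layer.  This constructor mirrors Dependencies::initialize() for that
// representation.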
 130 Dependencies::Dependencies(Arena* arena, OopRecorder* oop_recorder, CompileLog* log) {
 131   _oop_recorder = oop_recorder;
 132   _log = log;
 133   _dep_seen = new(arena) GrowableArray<int>(arena, 500, 0, 0);
 134   _using_dep_values = true;
 135   DEBUG_ONLY(_dep_values[end_marker] = NULL);
 136   for (int i = (int)FIRST_TYPE; i < (int)TYPE_LIMIT; i++) {
 137     _dep_values[i] = new(arena) GrowableArray<DepValue>(arena, 10, 0, DepValue());
 138   }
 139   _content_bytes = NULL;
 140   _size_in_bytes = (size_t)-1;
 141 
 142   assert(TYPE_LIMIT <= (1<<LG2_TYPE_LIMIT), "sanity");
 143 }
 144 
 145 void Dependencies::assert_evol_method(Method* m) {
 146   assert_common_1(evol_method, DepValue(_oop_recorder, m));
 147 }
 148 
 149 void Dependencies::assert_has_no_finalizable_subclasses(Klass* ctxk) {
 150   check_ctxk(ctxk);
 151   assert_common_1(no_finalizable_subclasses, DepValue(_oop_recorder, ctxk));
 152 }
 153 
 154 void Dependencies::assert_leaf_type(Klass* ctxk) {
 155   if (ctxk->is_array_klass()) {
 156     // As a special case, support this assertion on an array type,
 157     // which reduces to an assertion on its element type.
 158     // Note that this cannot be done with assertions that
 159     // relate to concreteness or abstractness.
 160     BasicType elemt = ArrayKlass::cast(ctxk)->element_type();
 161     if (is_java_primitive(elemt))  return;   // Ex:  int[][]
 162     ctxk = ObjArrayKlass::cast(ctxk)->bottom_klass();
 163     //if (ctxk->is_final())  return;            // Ex:  String[][]
 164   }
 165   check_ctxk(ctxk);
 166   assert_common_1(leaf_type, DepValue(_oop_recorder, ctxk));
 167 }
 168 
 169 void Dependencies::assert_abstract_with_unique_concrete_subtype(Klass* ctxk, Klass* conck) {
 170   check_ctxk_abstract(ctxk);
 171   DepValue ctxk_dv(_oop_recorder, ctxk);
 172   DepValue conck_dv(_oop_recorder, conck, &ctxk_dv);
 173   assert_common_2(abstract_with_unique_concrete_subtype, ctxk_dv, conck_dv);
 174 }
 175 
 176 void Dependencies::assert_unique_concrete_method(Klass* ctxk, Method* uniqm) {
 177   check_ctxk(ctxk);
 178   assert_common_2(unique_concrete_method, DepValue(_oop_recorder, ctxk), DepValue(_oop_recorder, uniqm));
 179 }
 180 
 181 void Dependencies::assert_call_site_target_value(oop call_site, oop method_handle) {
 182   assert_common_2(call_site_target_value, DepValue(_oop_recorder, JNIHandles::make_local(call_site)), DepValue(_oop_recorder, JNIHandles::make_local(method_handle)));
 183 }
 184 
 185 #endif // INCLUDE_JVMCI
 186 
 187 
// Helper function.  If we are adding a new dep. under ctxk2,
// try to find an old dep. under a broader ctxk1.  If there is one,
// the new dep. is already covered; if instead the old context is the
// narrower one, widen it to ctxk2.  Returns true if the merge succeeds.
 191 bool Dependencies::maybe_merge_ctxk(GrowableArray<ciBaseObject*>* deps,
 192                                     int ctxk_i, ciKlass* ctxk2) {
 193   ciKlass* ctxk1 = deps->at(ctxk_i)->as_metadata()->as_klass();
 194   if (ctxk2->is_subtype_of(ctxk1)) {
 195     return true;  // success, and no need to change
 196   } else if (ctxk1->is_subtype_of(ctxk2)) {
 197     // new context class fully subsumes previous one
 198     deps->at_put(ctxk_i, ctxk2);
 199     return true;
 200   } else {
 201     return false;
 202   }
 203 }
 204 
 205 void Dependencies::assert_common_1(DepType dept, ciBaseObject* x) {
 206   assert(dep_args(dept) == 1, "sanity");
 207   log_dependency(dept, x);
 208   GrowableArray<ciBaseObject*>* deps = _deps[dept];
 209 
 210   // see if the same (or a similar) dep is already recorded
 211   if (note_dep_seen(dept, x)) {
 212     assert(deps->find(x) >= 0, "sanity");
 213   } else {
 214     deps->append(x);
 215   }
 216 }
 217 
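// Two-argument dependencies are stored flat in their bucket, two slots per
// assertion, with the context class (if the type has one) in slot zero.
// Before appending, scan the bucket for an equivalent assertion and, for
// context-bearing types, try to merge contexts via maybe_merge_ctxk().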
 218 void Dependencies::assert_common_2(DepType dept,
 219                                    ciBaseObject* x0, ciBaseObject* x1) {
 220   assert(dep_args(dept) == 2, "sanity");
 221   log_dependency(dept, x0, x1);
 222   GrowableArray<ciBaseObject*>* deps = _deps[dept];
 223 
 224   // see if the same (or a similar) dep is already recorded
 225   bool has_ctxk = has_explicit_context_arg(dept);
 226   if (has_ctxk) {
 227     assert(dep_context_arg(dept) == 0, "sanity");
 228     if (note_dep_seen(dept, x1)) {
 229       // look in this bucket for redundant assertions
 230       const int stride = 2;
 231       for (int i = deps->length(); (i -= stride) >= 0; ) {
 232         ciBaseObject* y1 = deps->at(i+1);
 233         if (x1 == y1) {  // same subject; check the context
 234           if (maybe_merge_ctxk(deps, i+0, x0->as_metadata()->as_klass())) {
 235             return;
 236           }
 237         }
 238       }
 239     }
 240   } else {
 241     if (note_dep_seen(dept, x0) && note_dep_seen(dept, x1)) {
 242       // look in this bucket for redundant assertions
 243       const int stride = 2;
 244       for (int i = deps->length(); (i -= stride) >= 0; ) {
 245         ciBaseObject* y0 = deps->at(i+0);
 246         ciBaseObject* y1 = deps->at(i+1);
 247         if (x0 == y0 && x1 == y1) {
 248           return;
 249         }
 250       }
 251     }
 252   }
 253 
 254   // append the assertion in the correct bucket:
 255   deps->append(x0);
 256   deps->append(x1);
 257 }
 258 
 259 void Dependencies::assert_common_3(DepType dept,
 260                                    ciKlass* ctxk, ciBaseObject* x, ciBaseObject* x2) {
 261   assert(dep_context_arg(dept) == 0, "sanity");
 262   assert(dep_args(dept) == 3, "sanity");
 263   log_dependency(dept, ctxk, x, x2);
 264   GrowableArray<ciBaseObject*>* deps = _deps[dept];
 265 
 266   // try to normalize an unordered pair:
 267   bool swap = false;
 268   switch (dept) {
 269   case abstract_with_exclusive_concrete_subtypes_2:
 270     swap = (x->ident() > x2->ident() && x->as_metadata()->as_klass() != ctxk);
 271     break;
 272   case exclusive_concrete_methods_2:
 273     swap = (x->ident() > x2->ident() && x->as_metadata()->as_method()->holder() != ctxk);
 274     break;
 275   default:
 276     break;
 277   }
 278   if (swap) { ciBaseObject* t = x; x = x2; x2 = t; }
 279 
 280   // see if the same (or a similar) dep is already recorded
 281   if (note_dep_seen(dept, x) && note_dep_seen(dept, x2)) {
 282     // look in this bucket for redundant assertions
 283     const int stride = 3;
 284     for (int i = deps->length(); (i -= stride) >= 0; ) {
 285       ciBaseObject* y  = deps->at(i+1);
 286       ciBaseObject* y2 = deps->at(i+2);
 287       if (x == y && x2 == y2) {  // same subjects; check the context
 288         if (maybe_merge_ctxk(deps, i+0, ctxk)) {
 289           return;
 290         }
 291       }
 292     }
 293   }
 294   // append the assertion in the correct bucket:
 295   deps->append(ctxk);
 296   deps->append(x);
 297   deps->append(x2);
 298 }
 299 
 300 #if INCLUDE_JVMCI
 301 bool Dependencies::maybe_merge_ctxk(GrowableArray<DepValue>* deps,
 302                                     int ctxk_i, DepValue ctxk2_dv) {
 303   Klass* ctxk1 = deps->at(ctxk_i).as_klass(_oop_recorder);
 304   Klass* ctxk2 = ctxk2_dv.as_klass(_oop_recorder);
 305   if (ctxk2->is_subtype_of(ctxk1)) {
 306     return true;  // success, and no need to change
 307   } else if (ctxk1->is_subtype_of(ctxk2)) {
 308     // new context class fully subsumes previous one
 309     deps->at_put(ctxk_i, ctxk2_dv);
 310     return true;
 311   } else {
 312     return false;
 313   }
 314 }
 315 
 316 void Dependencies::assert_common_1(DepType dept, DepValue x) {
 317   assert(dep_args(dept) == 1, "sanity");
 318   //log_dependency(dept, x);
 319   GrowableArray<DepValue>* deps = _dep_values[dept];
 320 
 321   // see if the same (or a similar) dep is already recorded
 322   if (note_dep_seen(dept, x)) {
 323     assert(deps->find(x) >= 0, "sanity");
 324   } else {
 325     deps->append(x);
 326   }
 327 }
 328 
 329 void Dependencies::assert_common_2(DepType dept,
 330                                    DepValue x0, DepValue x1) {
 331   assert(dep_args(dept) == 2, "sanity");
 332   //log_dependency(dept, x0, x1);
 333   GrowableArray<DepValue>* deps = _dep_values[dept];
 334 
 335   // see if the same (or a similar) dep is already recorded
 336   bool has_ctxk = has_explicit_context_arg(dept);
 337   if (has_ctxk) {
 338     assert(dep_context_arg(dept) == 0, "sanity");
 339     if (note_dep_seen(dept, x1)) {
 340       // look in this bucket for redundant assertions
 341       const int stride = 2;
 342       for (int i = deps->length(); (i -= stride) >= 0; ) {
 343         DepValue y1 = deps->at(i+1);
 344         if (x1 == y1) {  // same subject; check the context
 345           if (maybe_merge_ctxk(deps, i+0, x0)) {
 346             return;
 347           }
 348         }
 349       }
 350     }
 351   } else {
 352     if (note_dep_seen(dept, x0) && note_dep_seen(dept, x1)) {
 353       // look in this bucket for redundant assertions
 354       const int stride = 2;
 355       for (int i = deps->length(); (i -= stride) >= 0; ) {
 356         DepValue y0 = deps->at(i+0);
 357         DepValue y1 = deps->at(i+1);
 358         if (x0 == y0 && x1 == y1) {
 359           return;
 360         }
 361       }
 362     }
 363   }
 364 
 365   // append the assertion in the correct bucket:
 366   deps->append(x0);
 367   deps->append(x1);
 368 }
 369 #endif // INCLUDE_JVMCI
 370 
 371 /// Support for encoding dependencies into an nmethod:
 372 
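// Copy the finished, word-aligned dependency encoding into the nmethod's
// dependencies section.  encode_content_bytes() must already have run.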
 373 void Dependencies::copy_to(nmethod* nm) {
 374   address beg = nm->dependencies_begin();
 375   address end = nm->dependencies_end();
 376   guarantee(end - beg >= (ptrdiff_t) size_in_bytes(), "bad sizing");
 377   Copy::disjoint_words((HeapWord*) content_bytes(),
 378                        (HeapWord*) beg,
 379                        size_in_bytes() / sizeof(HeapWord));
 380   assert(size_in_bytes() % sizeof(HeapWord) == 0, "copy by words");
 381 }
 382 
 383 static int sort_dep(ciBaseObject** p1, ciBaseObject** p2, int narg) {
 384   for (int i = 0; i < narg; i++) {
 385     int diff = p1[i]->ident() - p2[i]->ident();
 386     if (diff != 0)  return diff;
 387   }
 388   return 0;
 389 }
 390 static int sort_dep_arg_1(ciBaseObject** p1, ciBaseObject** p2)
 391 { return sort_dep(p1, p2, 1); }
 392 static int sort_dep_arg_2(ciBaseObject** p1, ciBaseObject** p2)
 393 { return sort_dep(p1, p2, 2); }
 394 static int sort_dep_arg_3(ciBaseObject** p1, ciBaseObject** p2)
 395 { return sort_dep(p1, p2, 3); }
 396 
 397 #if INCLUDE_JVMCI
 398 // metadata deps are sorted before object deps
 399 static int sort_dep_value(Dependencies::DepValue* p1, Dependencies::DepValue* p2, int narg) {
 400   for (int i = 0; i < narg; i++) {
 401     int diff = p1[i].sort_key() - p2[i].sort_key();
 402     if (diff != 0)  return diff;
 403   }
 404   return 0;
 405 }
 406 static int sort_dep_value_arg_1(Dependencies::DepValue* p1, Dependencies::DepValue* p2)
 407 { return sort_dep_value(p1, p2, 1); }
 408 static int sort_dep_value_arg_2(Dependencies::DepValue* p1, Dependencies::DepValue* p2)
 409 { return sort_dep_value(p1, p2, 2); }
 410 static int sort_dep_value_arg_3(Dependencies::DepValue* p1, Dependencies::DepValue* p2)
 411 { return sort_dep_value(p1, p2, 3); }
 412 #endif // INCLUDE_JVMCI
 413 
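// Sort each bucket by argument identity (ciBaseObject::ident, or
// DepValue::sort_key for JVMCI) so the encoded stream has a canonical order.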
 414 void Dependencies::sort_all_deps() {
 415 #if INCLUDE_JVMCI
 416   if (_using_dep_values) {
 417     for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
 418       DepType dept = (DepType)deptv;
 419       GrowableArray<DepValue>* deps = _dep_values[dept];
 420       if (deps->length() <= 1)  continue;
 421       switch (dep_args(dept)) {
 422       case 1: deps->sort(sort_dep_value_arg_1, 1); break;
 423       case 2: deps->sort(sort_dep_value_arg_2, 2); break;
 424       case 3: deps->sort(sort_dep_value_arg_3, 3); break;
 425       default: ShouldNotReachHere(); break;
 426       }
 427     }
 428     return;
 429   }
 430 #endif // INCLUDE_JVMCI
 431   for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
 432     DepType dept = (DepType)deptv;
 433     GrowableArray<ciBaseObject*>* deps = _deps[dept];
 434     if (deps->length() <= 1)  continue;
 435     switch (dep_args(dept)) {
 436     case 1: deps->sort(sort_dep_arg_1, 1); break;
 437     case 2: deps->sort(sort_dep_arg_2, 2); break;
 438     case 3: deps->sort(sort_dep_arg_3, 3); break;
 439     default: ShouldNotReachHere(); break;
 440     }
 441   }
 442 }
 443 
 444 size_t Dependencies::estimate_size_in_bytes() {
 445   size_t est_size = 100;
 446 #if INCLUDE_JVMCI
 447   if (_using_dep_values) {
 448     for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
 449       DepType dept = (DepType)deptv;
 450       GrowableArray<DepValue>* deps = _dep_values[dept];
 451       est_size += deps->length() * 2;  // tags and argument(s)
 452     }
 453     return est_size;
 454   }
 455 #endif // INCLUDE_JVMCI
 456   for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
 457     DepType dept = (DepType)deptv;
 458     GrowableArray<ciBaseObject*>* deps = _deps[dept];
 459     est_size += deps->length()*2;  // tags and argument(s)
 460   }
 461   return est_size;
 462 }
 463 
 464 ciKlass* Dependencies::ctxk_encoded_as_null(DepType dept, ciBaseObject* x) {
 465   switch (dept) {
 466   case abstract_with_exclusive_concrete_subtypes_2:
 467     return x->as_metadata()->as_klass();
 468   case unique_concrete_method:
 469   case exclusive_concrete_methods_2:
 470     return x->as_metadata()->as_method()->holder();
 471   default:
 472     return NULL;  // let NULL be NULL
 473   }
 474 }
 475 
 476 Klass* Dependencies::ctxk_encoded_as_null(DepType dept, Metadata* x) {
 477   assert(must_be_in_vm(), "raw oops here");
 478   switch (dept) {
 479   case abstract_with_exclusive_concrete_subtypes_2:
 480     assert(x->is_klass(), "sanity");
 481     return (Klass*) x;
 482   case unique_concrete_method:
 483   case exclusive_concrete_methods_2:
 484     assert(x->is_method(), "sanity");
 485     return ((Method*)x)->method_holder();
 486   default:
 487     return NULL;  // let NULL be NULL
 488   }
 489 }
 490 
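// Flatten all buckets into the compressed byte stream that copy_to() later
// installs in the nmethod: each assertion is one code byte (the DepType,
// possibly OR'ed with default_context_type_bit when the context class can be
// recomputed from the next argument) followed by the OopRecorder index of
// each remaining argument.  The stream ends with an end_marker byte and is
// padded out to a HeapWord boundary.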
 491 void Dependencies::encode_content_bytes() {
 492   sort_all_deps();
 493 
 494   // cast is safe, no deps can overflow INT_MAX
 495   CompressedWriteStream bytes((int)estimate_size_in_bytes());
 496 
 497 #if INCLUDE_JVMCI
 498   if (_using_dep_values) {
 499     for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
 500       DepType dept = (DepType)deptv;
 501       GrowableArray<DepValue>* deps = _dep_values[dept];
 502       if (deps->length() == 0)  continue;
 503       int stride = dep_args(dept);
 504       int ctxkj  = dep_context_arg(dept);  // -1 if no context arg
 505       assert(stride > 0, "sanity");
 506       for (int i = 0; i < deps->length(); i += stride) {
 507         jbyte code_byte = (jbyte)dept;
 508         int skipj = -1;
 509         if (ctxkj >= 0 && ctxkj+1 < stride) {
 510           Klass*  ctxk = deps->at(i+ctxkj+0).as_klass(_oop_recorder);
 511           DepValue x = deps->at(i+ctxkj+1);  // following argument
 512           if (ctxk == ctxk_encoded_as_null(dept, x.as_metadata(_oop_recorder))) {
 513             skipj = ctxkj;  // we win:  maybe one less oop to keep track of
 514             code_byte |= default_context_type_bit;
 515           }
 516         }
 517         bytes.write_byte(code_byte);
 518         for (int j = 0; j < stride; j++) {
 519           if (j == skipj)  continue;
 520           DepValue v = deps->at(i+j);
 521           int idx = v.index();
 522           bytes.write_int(idx);
 523         }
 524       }
 525     }
 526   } else {
 527 #endif // INCLUDE_JVMCI
 528   for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
 529     DepType dept = (DepType)deptv;
 530     GrowableArray<ciBaseObject*>* deps = _deps[dept];
 531     if (deps->length() == 0)  continue;
 532     int stride = dep_args(dept);
 533     int ctxkj  = dep_context_arg(dept);  // -1 if no context arg
 534     assert(stride > 0, "sanity");
 535     for (int i = 0; i < deps->length(); i += stride) {
 536       jbyte code_byte = (jbyte)dept;
 537       int skipj = -1;
 538       if (ctxkj >= 0 && ctxkj+1 < stride) {
 539         ciKlass*  ctxk = deps->at(i+ctxkj+0)->as_metadata()->as_klass();
 540         ciBaseObject* x     = deps->at(i+ctxkj+1);  // following argument
 541         if (ctxk == ctxk_encoded_as_null(dept, x)) {
 542           skipj = ctxkj;  // we win:  maybe one less oop to keep track of
 543           code_byte |= default_context_type_bit;
 544         }
 545       }
 546       bytes.write_byte(code_byte);
 547       for (int j = 0; j < stride; j++) {
 548         if (j == skipj)  continue;
 549         ciBaseObject* v = deps->at(i+j);
 550         int idx;
 551         if (v->is_object()) {
 552           idx = _oop_recorder->find_index(v->as_object()->constant_encoding());
 553         } else {
 554           ciMetadata* meta = v->as_metadata();
 555           idx = _oop_recorder->find_index(meta->constant_encoding());
 556         }
 557         bytes.write_int(idx);
 558       }
 559     }
 560   }
 561 #if INCLUDE_JVMCI
 562   }
 563 #endif
 564 
 565   // write a sentinel byte to mark the end
 566   bytes.write_byte(end_marker);
 567 
 568   // round it out to a word boundary
 569   while (bytes.position() % sizeof(HeapWord) != 0) {
 570     bytes.write_byte(end_marker);
 571   }
 572 
 573   // check whether the dept byte encoding really works
 574   assert((jbyte)default_context_type_bit != 0, "byte overflow");
 575 
 576   _content_bytes = bytes.buffer();
 577   _size_in_bytes = bytes.position();
 578 }
 579 
 580 
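// Printable names and argument counts, indexed by DepType.  The order must
// stay in sync with the DepType enum in dependencies.hpp.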
 581 const char* Dependencies::_dep_name[TYPE_LIMIT] = {
 582   "end_marker",
 583   "evol_method",
 584   "leaf_type",
 585   "abstract_with_unique_concrete_subtype",
 586   "abstract_with_no_concrete_subtype",
 587   "concrete_with_no_concrete_subtype",
 588   "unique_concrete_method",
 589   "abstract_with_exclusive_concrete_subtypes_2",
 590   "exclusive_concrete_methods_2",
 591   "no_finalizable_subclasses",
 592   "call_site_target_value"
 593 };
 594 
 595 int Dependencies::_dep_args[TYPE_LIMIT] = {
 596   -1,// end_marker
 597   1, // evol_method m
 598   1, // leaf_type ctxk
 599   2, // abstract_with_unique_concrete_subtype ctxk, k
 600   1, // abstract_with_no_concrete_subtype ctxk
 601   1, // concrete_with_no_concrete_subtype ctxk
 602   2, // unique_concrete_method ctxk, m
  3, // abstract_with_exclusive_concrete_subtypes_2 ctxk, k1, k2
  3, // exclusive_concrete_methods_2 ctxk, m1, m2
 605   1, // no_finalizable_subclasses ctxk
 606   2  // call_site_target_value call_site, method_handle
 607 };
 608 
 609 const char* Dependencies::dep_name(Dependencies::DepType dept) {
 610   if (!dept_in_mask(dept, all_types))  return "?bad-dep?";
 611   return _dep_name[dept];
 612 }
 613 
 614 int Dependencies::dep_args(Dependencies::DepType dept) {
 615   if (!dept_in_mask(dept, all_types))  return -1;
 616   return _dep_args[dept];
 617 }
 618 
 619 void Dependencies::check_valid_dependency_type(DepType dept) {
 620   guarantee(FIRST_TYPE <= dept && dept < TYPE_LIMIT, "invalid dependency type: %d", (int) dept);
 621 }
 622 
 623 // for the sake of the compiler log, print out current dependencies:
 624 void Dependencies::log_all_dependencies() {
 625   if (log() == NULL)  return;
 626   ResourceMark rm;
 627   for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
 628     DepType dept = (DepType)deptv;
 629     GrowableArray<ciBaseObject*>* deps = _deps[dept];
 630     int deplen = deps->length();
 631     if (deplen == 0) {
 632       continue;
 633     }
 634     int stride = dep_args(dept);
 635     GrowableArray<ciBaseObject*>* ciargs = new GrowableArray<ciBaseObject*>(stride);
 636     for (int i = 0; i < deps->length(); i += stride) {
 637       for (int j = 0; j < stride; j++) {
 638         // flush out the identities before printing
 639         ciargs->push(deps->at(i+j));
 640       }
 641       write_dependency_to(log(), dept, ciargs);
 642       ciargs->clear();
 643     }
    guarantee(deplen == deps->length(), "deps array cannot grow inside nested ResourceMark scope");
 645   }
 646 }
 647 
 648 void Dependencies::write_dependency_to(CompileLog* log,
 649                                        DepType dept,
 650                                        GrowableArray<DepArgument>* args,
 651                                        Klass* witness) {
 652   if (log == NULL) {
 653     return;
 654   }
 655   ResourceMark rm;
 656   ciEnv* env = ciEnv::current();
 657   GrowableArray<ciBaseObject*>* ciargs = new GrowableArray<ciBaseObject*>(args->length());
 658   for (GrowableArrayIterator<DepArgument> it = args->begin(); it != args->end(); ++it) {
 659     DepArgument arg = *it;
 660     if (arg.is_oop()) {
 661       ciargs->push(env->get_object(arg.oop_value()));
 662     } else {
 663       ciargs->push(env->get_metadata(arg.metadata_value()));
 664     }
 665   }
 666   int argslen = ciargs->length();
 667   Dependencies::write_dependency_to(log, dept, ciargs, witness);
  guarantee(argslen == ciargs->length(), "ciargs array cannot grow inside nested ResourceMark scope");
 669 }
 670 
 671 void Dependencies::write_dependency_to(CompileLog* log,
 672                                        DepType dept,
 673                                        GrowableArray<ciBaseObject*>* args,
 674                                        Klass* witness) {
 675   if (log == NULL) {
 676     return;
 677   }
 678   ResourceMark rm;
 679   GrowableArray<int>* argids = new GrowableArray<int>(args->length());
 680   for (GrowableArrayIterator<ciBaseObject*> it = args->begin(); it != args->end(); ++it) {
 681     ciBaseObject* obj = *it;
 682     if (obj->is_object()) {
 683       argids->push(log->identify(obj->as_object()));
 684     } else {
 685       argids->push(log->identify(obj->as_metadata()));
 686     }
 687   }
 688   if (witness != NULL) {
 689     log->begin_elem("dependency_failed");
 690   } else {
 691     log->begin_elem("dependency");
 692   }
 693   log->print(" type='%s'", dep_name(dept));
 694   const int ctxkj = dep_context_arg(dept);  // -1 if no context arg
 695   if (ctxkj >= 0 && ctxkj < argids->length()) {
 696     log->print(" ctxk='%d'", argids->at(ctxkj));
 697   }
 698   // write remaining arguments, if any.
 699   for (int j = 0; j < argids->length(); j++) {
 700     if (j == ctxkj)  continue;  // already logged
 701     if (j == 1) {
 702       log->print(  " x='%d'",    argids->at(j));
 703     } else {
 704       log->print(" x%d='%d'", j, argids->at(j));
 705     }
 706   }
 707   if (witness != NULL) {
 708     log->object("witness", witness);
 709     log->stamp();
 710   }
 711   log->end_elem();
 712 }
 713 
 714 void Dependencies::write_dependency_to(xmlStream* xtty,
 715                                        DepType dept,
 716                                        GrowableArray<DepArgument>* args,
 717                                        Klass* witness) {
 718   if (xtty == NULL) {
 719     return;
 720   }
 721   Thread* thread = Thread::current();
 722   HandleMark rm(thread);
 723   ttyLocker ttyl;
 724   int ctxkj = dep_context_arg(dept);  // -1 if no context arg
 725   if (witness != NULL) {
 726     xtty->begin_elem("dependency_failed");
 727   } else {
 728     xtty->begin_elem("dependency");
 729   }
 730   xtty->print(" type='%s'", dep_name(dept));
 731   if (ctxkj >= 0) {
 732     xtty->object("ctxk", args->at(ctxkj).metadata_value());
 733   }
 734   // write remaining arguments, if any.
 735   for (int j = 0; j < args->length(); j++) {
 736     if (j == ctxkj)  continue;  // already logged
 737     DepArgument arg = args->at(j);
 738     if (j == 1) {
 739       if (arg.is_oop()) {
 740         xtty->object("x", Handle(thread, arg.oop_value()));
 741       } else {
 742         xtty->object("x", arg.metadata_value());
 743       }
 744     } else {
 745       char xn[10]; sprintf(xn, "x%d", j);
 746       if (arg.is_oop()) {
 747         xtty->object(xn, Handle(thread, arg.oop_value()));
 748       } else {
 749         xtty->object(xn, arg.metadata_value());
 750       }
 751     }
 752   }
 753   if (witness != NULL) {
 754     xtty->object("witness", witness);
 755     xtty->stamp();
 756   }
 757   xtty->end_elem();
 758 }
 759 
 760 void Dependencies::print_dependency(DepType dept, GrowableArray<DepArgument>* args,
 761                                     Klass* witness, outputStream* st) {
 762   ResourceMark rm;
 763   ttyLocker ttyl;   // keep the following output all in one block
 764   st->print_cr("%s of type %s",
 765                 (witness == NULL)? "Dependency": "Failed dependency",
 766                 dep_name(dept));
 767   // print arguments
 768   int ctxkj = dep_context_arg(dept);  // -1 if no context arg
 769   for (int j = 0; j < args->length(); j++) {
 770     DepArgument arg = args->at(j);
 771     bool put_star = false;
 772     if (arg.is_null())  continue;
 773     const char* what;
 774     if (j == ctxkj) {
 775       assert(arg.is_metadata(), "must be");
 776       what = "context";
 777       put_star = !Dependencies::is_concrete_klass((Klass*)arg.metadata_value());
 778     } else if (arg.is_method()) {
 779       what = "method ";
 780       put_star = !Dependencies::is_concrete_method((Method*)arg.metadata_value(), NULL);
 781     } else if (arg.is_klass()) {
 782       what = "class  ";
 783     } else {
 784       what = "object ";
 785     }
 786     st->print("  %s = %s", what, (put_star? "*": ""));
 787     if (arg.is_klass()) {
 788       st->print("%s", ((Klass*)arg.metadata_value())->external_name());
 789     } else if (arg.is_method()) {
 790       ((Method*)arg.metadata_value())->print_value_on(st);
 791     } else if (arg.is_oop()) {
 792       arg.oop_value()->print_value_on(st);
 793     } else {
 794       ShouldNotReachHere(); // Provide impl for this type.
 795     }
 796 
 797     st->cr();
 798   }
 799   if (witness != NULL) {
 800     bool put_star = !Dependencies::is_concrete_klass(witness);
 801     st->print_cr("  witness = %s%s",
 802                   (put_star? "*": ""),
 803                   witness->external_name());
 804   }
 805 }
 806 
 807 void Dependencies::DepStream::log_dependency(Klass* witness) {
 808   if (_deps == NULL && xtty == NULL)  return;  // fast cutout for runtime
 809   ResourceMark rm;
 810   const int nargs = argument_count();
 811   GrowableArray<DepArgument>* args = new GrowableArray<DepArgument>(nargs);
 812   for (int j = 0; j < nargs; j++) {
 813     if (is_oop_argument(j)) {
 814       args->push(argument_oop(j));
 815     } else {
 816       args->push(argument(j));
 817     }
 818   }
 819   int argslen = args->length();
 820   if (_deps != NULL && _deps->log() != NULL) {
 821     if (ciEnv::current() != NULL) {
 822       Dependencies::write_dependency_to(_deps->log(), type(), args, witness);
 823     } else {
 824       // Treat the CompileLog as an xmlstream instead
 825       Dependencies::write_dependency_to((xmlStream*)_deps->log(), type(), args, witness);
 826     }
 827   } else {
 828     Dependencies::write_dependency_to(xtty, type(), args, witness);
 829   }
  guarantee(argslen == args->length(), "args array cannot grow inside nested ResourceMark scope");
 831 }
 832 
 833 void Dependencies::DepStream::print_dependency(Klass* witness, bool verbose, outputStream* st) {
 834   ResourceMark rm;
 835   int nargs = argument_count();
 836   GrowableArray<DepArgument>* args = new GrowableArray<DepArgument>(nargs);
 837   for (int j = 0; j < nargs; j++) {
 838     if (is_oop_argument(j)) {
 839       args->push(argument_oop(j));
 840     } else {
 841       args->push(argument(j));
 842     }
 843   }
 844   int argslen = args->length();
 845   Dependencies::print_dependency(type(), args, witness, st);
 846   if (verbose) {
 847     if (_code != NULL) {
 848       st->print("  code: ");
 849       _code->print_value_on(st);
 850       st->cr();
 851     }
 852   }
  guarantee(argslen == args->length(), "args array cannot grow inside nested ResourceMark scope");
 854 }
 855 
 856 
 857 /// Dependency stream support (decodes dependencies from an nmethod):
 858 
 859 #ifdef ASSERT
 860 void Dependencies::DepStream::initial_asserts(size_t byte_limit) {
 861   assert(must_be_in_vm(), "raw oops here");
 862   _byte_limit = byte_limit;
 863   _type       = (DepType)(end_marker-1);  // defeat "already at end" assert
 864   assert((_code!=NULL) + (_deps!=NULL) == 1, "one or t'other");
 865 }
 866 #endif //ASSERT
 867 
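// Advance to the next dependency record.  Reads the code byte, strips the
// default_context_type_bit, and loads the argument indexes into _xi[].
// A compressed (skipped) context argument is stored as index 0 and is
// re-derived on demand in argument().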
 868 bool Dependencies::DepStream::next() {
 869   assert(_type != end_marker, "already at end");
 870   if (_bytes.position() == 0 && _code != NULL
 871       && _code->dependencies_size() == 0) {
 872     // Method has no dependencies at all.
 873     return false;
 874   }
 875   int code_byte = (_bytes.read_byte() & 0xFF);
 876   if (code_byte == end_marker) {
 877     DEBUG_ONLY(_type = end_marker);
 878     return false;
 879   } else {
 880     int ctxk_bit = (code_byte & Dependencies::default_context_type_bit);
 881     code_byte -= ctxk_bit;
 882     DepType dept = (DepType)code_byte;
 883     _type = dept;
 884     Dependencies::check_valid_dependency_type(dept);
 885     int stride = _dep_args[dept];
 886     assert(stride == dep_args(dept), "sanity");
 887     int skipj = -1;
 888     if (ctxk_bit != 0) {
 889       skipj = 0;  // currently the only context argument is at zero
 890       assert(skipj == dep_context_arg(dept), "zero arg always ctxk");
 891     }
 892     for (int j = 0; j < stride; j++) {
 893       _xi[j] = (j == skipj)? 0: _bytes.read_int();
 894     }
 895     DEBUG_ONLY(_xi[stride] = -1);   // help detect overruns
 896     return true;
 897   }
 898 }
 899 
 900 inline Metadata* Dependencies::DepStream::recorded_metadata_at(int i) {
 901   Metadata* o = NULL;
 902   if (_code != NULL) {
 903     o = _code->metadata_at(i);
 904   } else {
 905     o = _deps->oop_recorder()->metadata_at(i);
 906   }
 907   return o;
 908 }
 909 
 910 inline oop Dependencies::DepStream::recorded_oop_at(int i) {
 911   return (_code != NULL)
 912          ? _code->oop_at(i)
 913     : JNIHandles::resolve(_deps->oop_recorder()->oop_at(i));
 914 }
 915 
 916 Metadata* Dependencies::DepStream::argument(int i) {
 917   Metadata* result = recorded_metadata_at(argument_index(i));
 918 
 919   if (result == NULL) { // Explicit context argument can be compressed
 920     int ctxkj = dep_context_arg(type());  // -1 if no explicit context arg
 921     if (ctxkj >= 0 && i == ctxkj && ctxkj+1 < argument_count()) {
 922       result = ctxk_encoded_as_null(type(), argument(ctxkj+1));
 923     }
 924   }
 925 
 926   assert(result == NULL || result->is_klass() || result->is_method(), "must be");
 927   return result;
 928 }
 929 
 930 /**
 931  * Returns a unique identifier for each dependency argument.
 932  */
 933 uintptr_t Dependencies::DepStream::get_identifier(int i) {
 934   if (is_oop_argument(i)) {
 935     return (uintptr_t)(oopDesc*)argument_oop(i);
 936   } else {
 937     return (uintptr_t)argument(i);
 938   }
 939 }
 940 
 941 oop Dependencies::DepStream::argument_oop(int i) {
 942   oop result = recorded_oop_at(argument_index(i));
 943   assert(result == NULL || result->is_oop(), "must be");
 944   return result;
 945 }
 946 
 947 Klass* Dependencies::DepStream::context_type() {
 948   assert(must_be_in_vm(), "raw oops here");
 949 
 950   // Most dependencies have an explicit context type argument.
 951   {
 952     int ctxkj = dep_context_arg(type());  // -1 if no explicit context arg
 953     if (ctxkj >= 0) {
 954       Metadata* k = argument(ctxkj);
 955       assert(k != NULL && k->is_klass(), "type check");
 956       return (Klass*)k;
 957     }
 958   }
 959 
 960   // Some dependencies are using the klass of the first object
 961   // argument as implicit context type.
 962   {
 963     int ctxkj = dep_implicit_context_arg(type());
 964     if (ctxkj >= 0) {
 965       Klass* k = argument_oop(ctxkj)->klass();
 966       assert(k != NULL && k->is_klass(), "type check");
 967       return (Klass*) k;
 968     }
 969   }
 970 
 971   // And some dependencies don't have a context type at all,
 972   // e.g. evol_method.
 973   return NULL;
 974 }
 975 
 976 // ----------------- DependencySignature --------------------------------------
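// Two signatures are equal only if they have the same dependency type and
// identical argument identifiers; this is used to weed out duplicate
// dependency checks.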
 977 bool DependencySignature::equals(DependencySignature const& s1, DependencySignature const& s2) {
 978   if ((s1.type() != s2.type()) || (s1.args_count() != s2.args_count())) {
 979     return false;
 980   }
 981 
 982   for (int i = 0; i < s1.args_count(); i++) {
 983     if (s1.arg(i) != s2.arg(i)) {
 984       return false;
 985     }
 986   }
 987   return true;
 988 }
 989 
 990 /// Checking dependencies:
 991 
 992 // This hierarchy walker inspects subtypes of a given type,
 993 // trying to find a "bad" class which breaks a dependency.
 994 // Such a class is called a "witness" to the broken dependency.
 995 // While searching around, we ignore "participants", which
 996 // are already known to the dependency.
 997 class ClassHierarchyWalker {
 998  public:
 999   enum { PARTICIPANT_LIMIT = 3 };
1000 
1001  private:
1002   // optional method descriptor to check for:
1003   Symbol* _name;
1004   Symbol* _signature;
1005 
1006   // special classes which are not allowed to be witnesses:
1007   Klass*    _participants[PARTICIPANT_LIMIT+1];
1008   int       _num_participants;
1009 
1010   // cache of method lookups
1011   Method* _found_methods[PARTICIPANT_LIMIT+1];
1012 
1013   // if non-zero, tells how many witnesses to convert to participants
1014   int       _record_witnesses;
1015 
1016   void initialize(Klass* participant) {
1017     _record_witnesses = 0;
1018     _participants[0]  = participant;
1019     _found_methods[0] = NULL;
1020     _num_participants = 0;
1021     if (participant != NULL) {
1022       // Terminating NULL.
1023       _participants[1] = NULL;
1024       _found_methods[1] = NULL;
1025       _num_participants = 1;
1026     }
1027   }
1028 
1029   void initialize_from_method(Method* m) {
1030     assert(m != NULL && m->is_method(), "sanity");
1031     _name      = m->name();
1032     _signature = m->signature();
1033   }
1034 
1035  public:
1036   // The walker is initialized to recognize certain methods and/or types
1037   // as friendly participants.
1038   ClassHierarchyWalker(Klass* participant, Method* m) {
1039     initialize_from_method(m);
1040     initialize(participant);
1041   }
1042   ClassHierarchyWalker(Method* m) {
1043     initialize_from_method(m);
1044     initialize(NULL);
1045   }
1046   ClassHierarchyWalker(Klass* participant = NULL) {
1047     _name      = NULL;
1048     _signature = NULL;
1049     initialize(participant);
1050   }
1051 
1052   // This is common code for two searches:  One for concrete subtypes,
1053   // the other for concrete method implementations and overrides.
1054   bool doing_subtype_search() {
1055     return _name == NULL;
1056   }
1057 
1058   int num_participants() { return _num_participants; }
1059   Klass* participant(int n) {
1060     assert((uint)n <= (uint)_num_participants, "oob");
1061     return _participants[n];
1062   }
1063 
1064   // Note:  If n==num_participants, returns NULL.
1065   Method* found_method(int n) {
1066     assert((uint)n <= (uint)_num_participants, "oob");
1067     Method* fm = _found_methods[n];
1068     assert(n == _num_participants || fm != NULL, "proper usage");
1069     if (fm != NULL && fm->method_holder() != _participants[n]) {
1070       // Default methods from interfaces can be added to classes. In
1071       // that case the holder of the method is not the class but the
1072       // interface where it's defined.
1073       assert(fm->is_default_method(), "sanity");
1074       return NULL;
1075     }
1076     return fm;
1077   }
1078 
1079 #ifdef ASSERT
1080   // Assert that m is inherited into ctxk, without intervening overrides.
1081   // (May return true even if this is not true, in corner cases where we punt.)
1082   bool check_method_context(Klass* ctxk, Method* m) {
1083     if (m->method_holder() == ctxk)
1084       return true;  // Quick win.
1085     if (m->is_private())
1086       return false; // Quick lose.  Should not happen.
1087     if (!(m->is_public() || m->is_protected()))
1088       // The override story is complex when packages get involved.
1089       return true;  // Must punt the assertion to true.
1090     Method* lm = ctxk->lookup_method(m->name(), m->signature());
1091     if (lm == NULL && ctxk->is_instance_klass()) {
1092       // It might be an interface method
1093       lm = InstanceKlass::cast(ctxk)->lookup_method_in_ordered_interfaces(m->name(),
1094                                                                           m->signature());
1095     }
1096     if (lm == m)
1097       // Method m is inherited into ctxk.
1098       return true;
1099     if (lm != NULL) {
1100       if (!(lm->is_public() || lm->is_protected())) {
1101         // Method is [package-]private, so the override story is complex.
1102         return true;  // Must punt the assertion to true.
1103       }
1104       if (lm->is_static()) {
1105         // Static methods don't override non-static so punt
1106         return true;
1107       }
1108       if (!Dependencies::is_concrete_method(lm, ctxk) &&
1109           !Dependencies::is_concrete_method(m, ctxk)) {
1110         // They are both non-concrete
1111         if (lm->method_holder()->is_subtype_of(m->method_holder())) {
1112           // Method m is overridden by lm, but both are non-concrete.
1113           return true;
1114         }
1115         if (lm->method_holder()->is_interface() && m->method_holder()->is_interface() &&
1116             ctxk->is_subtype_of(m->method_holder()) && ctxk->is_subtype_of(lm->method_holder())) {
1117           // Interface method defined in multiple super interfaces
1118           return true;
1119         }
1120       }
1121     }
1122     ResourceMark rm;
1123     tty->print_cr("Dependency method not found in the associated context:");
1124     tty->print_cr("  context = %s", ctxk->external_name());
1125     tty->print(   "  method = "); m->print_short_name(tty); tty->cr();
1126     if (lm != NULL) {
1127       tty->print( "  found = "); lm->print_short_name(tty); tty->cr();
1128     }
1129     return false;
1130   }
1131 #endif
1132 
1133   void add_participant(Klass* participant) {
1134     assert(_num_participants + _record_witnesses < PARTICIPANT_LIMIT, "oob");
1135     int np = _num_participants++;
1136     _participants[np] = participant;
1137     _participants[np+1] = NULL;
1138     _found_methods[np+1] = NULL;
1139   }
1140 
1141   void record_witnesses(int add) {
1142     if (add > PARTICIPANT_LIMIT)  add = PARTICIPANT_LIMIT;
1143     assert(_num_participants + add < PARTICIPANT_LIMIT, "oob");
1144     _record_witnesses = add;
1145   }
1146 
1147   bool is_witness(Klass* k) {
1148     if (doing_subtype_search()) {
1149       return Dependencies::is_concrete_klass(k);
1150     } else if (!k->is_instance_klass()) {
1151       return false; // no methods to find in an array type
1152     } else {
1153       // Search class hierarchy first.
1154       Method* m = InstanceKlass::cast(k)->find_instance_method(_name, _signature);
1155       if (!Dependencies::is_concrete_method(m, k)) {
1156         // Check interface defaults also, if any exist.
1157         Array<Method*>* default_methods = InstanceKlass::cast(k)->default_methods();
1158         if (default_methods == NULL)
1159             return false;
1160         m = InstanceKlass::cast(k)->find_method(default_methods, _name, _signature);
1161         if (!Dependencies::is_concrete_method(m, NULL))
1162             return false;
1163       }
1164       _found_methods[_num_participants] = m;
1165       // Note:  If add_participant(k) is called,
1166       // the method m will already be memoized for it.
1167       return true;
1168     }
1169   }
1170 
1171   bool is_participant(Klass* k) {
1172     if (k == _participants[0]) {
1173       return true;
1174     } else if (_num_participants <= 1) {
1175       return false;
1176     } else {
1177       return in_list(k, &_participants[1]);
1178     }
1179   }
1180   bool ignore_witness(Klass* witness) {
1181     if (_record_witnesses == 0) {
1182       return false;
1183     } else {
1184       --_record_witnesses;
1185       add_participant(witness);
1186       return true;
1187     }
1188   }
1189   static bool in_list(Klass* x, Klass** list) {
1190     for (int i = 0; ; i++) {
1191       Klass* y = list[i];
1192       if (y == NULL)  break;
1193       if (y == x)  return true;
1194     }
1195     return false;  // not in list
1196   }
1197 
1198  private:
1199   // the actual search method:
1200   Klass* find_witness_anywhere(Klass* context_type,
1201                                  bool participants_hide_witnesses,
1202                                  bool top_level_call = true);
1203   // the spot-checking version:
1204   Klass* find_witness_in(KlassDepChange& changes,
1205                          Klass* context_type,
1206                            bool participants_hide_witnesses);
1207  public:
1208   Klass* find_witness_subtype(Klass* context_type, KlassDepChange* changes = NULL) {
1209     assert(doing_subtype_search(), "must set up a subtype search");
1210     // When looking for unexpected concrete types,
1211     // do not look beneath expected ones.
1212     const bool participants_hide_witnesses = true;
1213     // CX > CC > C' is OK, even if C' is new.
1214     // CX > { CC,  C' } is not OK if C' is new, and C' is the witness.
1215     if (changes != NULL) {
1216       return find_witness_in(*changes, context_type, participants_hide_witnesses);
1217     } else {
1218       return find_witness_anywhere(context_type, participants_hide_witnesses);
1219     }
1220   }
1221   Klass* find_witness_definer(Klass* context_type, KlassDepChange* changes = NULL) {
1222     assert(!doing_subtype_search(), "must set up a method definer search");
1223     // When looking for unexpected concrete methods,
1224     // look beneath expected ones, to see if there are overrides.
1225     const bool participants_hide_witnesses = true;
1226     // CX.m > CC.m > C'.m is not OK, if C'.m is new, and C' is the witness.
1227     if (changes != NULL) {
1228       return find_witness_in(*changes, context_type, !participants_hide_witnesses);
1229     } else {
1230       return find_witness_anywhere(context_type, !participants_hide_witnesses);
1231     }
1232   }
1233 };
1234 
1235 #ifndef PRODUCT
1236 static int deps_find_witness_calls = 0;
1237 static int deps_find_witness_steps = 0;
1238 static int deps_find_witness_recursions = 0;
1239 static int deps_find_witness_singles = 0;
1240 static int deps_find_witness_print = 0; // set to -1 to force a final print
1241 static bool count_find_witness_calls() {
1242   if (TraceDependencies || LogCompilation) {
1243     int pcount = deps_find_witness_print + 1;
1244     bool final_stats      = (pcount == 0);
1245     bool initial_call     = (pcount == 1);
1246     bool occasional_print = ((pcount & ((1<<10) - 1)) == 0);
1247     if (pcount < 0)  pcount = 1; // crude overflow protection
1248     deps_find_witness_print = pcount;
1249     if (VerifyDependencies && initial_call) {
1250       tty->print_cr("Warning:  TraceDependencies results may be inflated by VerifyDependencies");
1251     }
1252     if (occasional_print || final_stats) {
1253       // Every now and then dump a little info about dependency searching.
1254       if (xtty != NULL) {
1255        ttyLocker ttyl;
1256        xtty->elem("deps_find_witness calls='%d' steps='%d' recursions='%d' singles='%d'",
1257                    deps_find_witness_calls,
1258                    deps_find_witness_steps,
1259                    deps_find_witness_recursions,
1260                    deps_find_witness_singles);
1261       }
1262       if (final_stats || (TraceDependencies && WizardMode)) {
1263         ttyLocker ttyl;
1264         tty->print_cr("Dependency check (find_witness) "
1265                       "calls=%d, steps=%d (avg=%.1f), recursions=%d, singles=%d",
1266                       deps_find_witness_calls,
1267                       deps_find_witness_steps,
1268                       (double)deps_find_witness_steps / deps_find_witness_calls,
1269                       deps_find_witness_recursions,
1270                       deps_find_witness_singles);
1271       }
1272     }
1273     return true;
1274   }
1275   return false;
1276 }
1277 #else
1278 #define count_find_witness_calls() (0)
1279 #endif //PRODUCT
1280 
1281 
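// Spot-check a dependency against a single hierarchy change: only the newly
// loaded type (changes.new_type()) can be a fresh witness, so there is no
// need to walk the whole subtree under context_type.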
1282 Klass* ClassHierarchyWalker::find_witness_in(KlassDepChange& changes,
1283                                                Klass* context_type,
1284                                                bool participants_hide_witnesses) {
1285   assert(changes.involves_context(context_type), "irrelevant dependency");
1286   Klass* new_type = changes.new_type();
1287 
1288   (void)count_find_witness_calls();
1289   NOT_PRODUCT(deps_find_witness_singles++);
1290 
1291   // Current thread must be in VM (not native mode, as in CI):
1292   assert(must_be_in_vm(), "raw oops here");
1293   // Must not move the class hierarchy during this check:
1294   assert_locked_or_safepoint(Compile_lock);
1295 
1296   int nof_impls = InstanceKlass::cast(context_type)->nof_implementors();
1297   if (nof_impls > 1) {
1298     // Avoid this case: *I.m > { A.m, C }; B.m > C
1299     // %%% Until this is fixed more systematically, bail out.
1300     // See corresponding comment in find_witness_anywhere.
1301     return context_type;
1302   }
1303 
1304   assert(!is_participant(new_type), "only old classes are participants");
1305   if (participants_hide_witnesses) {
1306     // If the new type is a subtype of a participant, we are done.
1307     for (int i = 0; i < num_participants(); i++) {
1308       Klass* part = participant(i);
1309       if (part == NULL)  continue;
1310       assert(changes.involves_context(part) == new_type->is_subtype_of(part),
1311              "correct marking of participants, b/c new_type is unique");
1312       if (changes.involves_context(part)) {
1313         // new guy is protected from this check by previous participant
1314         return NULL;
1315       }
1316     }
1317   }
1318 
1319   if (is_witness(new_type) &&
1320       !ignore_witness(new_type)) {
1321     return new_type;
1322   }
1323 
1324   return NULL;
1325 }
1326 
1327 
1328 // Walk hierarchy under a context type, looking for unexpected types.
1329 // Do not report participant types, and recursively walk beneath
1330 // them only if participants_hide_witnesses is false.
1331 // If top_level_call is false, skip testing the context type,
1332 // because the caller has already considered it.
1333 Klass* ClassHierarchyWalker::find_witness_anywhere(Klass* context_type,
1334                                                      bool participants_hide_witnesses,
1335                                                      bool top_level_call) {
1336   // Current thread must be in VM (not native mode, as in CI):
1337   assert(must_be_in_vm(), "raw oops here");
1338   // Must not move the class hierarchy during this check:
1339   assert_locked_or_safepoint(Compile_lock);
1340 
1341   bool do_counts = count_find_witness_calls();
1342 
1343   // Check the root of the sub-hierarchy first.
1344   if (top_level_call) {
1345     if (do_counts) {
1346       NOT_PRODUCT(deps_find_witness_calls++);
1347       NOT_PRODUCT(deps_find_witness_steps++);
1348     }
1349     if (is_participant(context_type)) {
1350       if (participants_hide_witnesses)  return NULL;
1351       // else fall through to search loop...
1352     } else if (is_witness(context_type) && !ignore_witness(context_type)) {
1353       // The context is an abstract class or interface, to start with.
1354       return context_type;
1355     }
1356   }
1357 
1358   // Now we must check each implementor and each subclass.
1359   // Use a short worklist to avoid blowing the stack.
1360   // Each worklist entry is a *chain* of subklass siblings to process.
1361   const int CHAINMAX = 100;  // >= 1 + InstanceKlass::implementors_limit
1362   Klass* chains[CHAINMAX];
1363   int    chaini = 0;  // index into worklist
1364   Klass* chain;       // scratch variable
1365 #define ADD_SUBCLASS_CHAIN(k)                     {  \
1366     assert(chaini < CHAINMAX, "oob");                \
1367     chain = k->subklass();                           \
1368     if (chain != NULL)  chains[chaini++] = chain;    }
1369 
1370   // Look for non-abstract subclasses.
1371   // (Note:  Interfaces do not have subclasses.)
1372   ADD_SUBCLASS_CHAIN(context_type);
1373 
1374   // If it is an interface, search its direct implementors.
1375   // (Their subclasses are additional indirect implementors.
1376   // See InstanceKlass::add_implementor.)
1377   // (Note:  nof_implementors is always zero for non-interfaces.)
1378   if (top_level_call) {
1379     int nof_impls = InstanceKlass::cast(context_type)->nof_implementors();
1380     if (nof_impls > 1) {
1381       // Avoid this case: *I.m > { A.m, C }; B.m > C
1382       // Here, I.m has 2 concrete implementations, but m appears unique
1383       // as A.m, because the search misses B.m when checking C.
1384       // The inherited method B.m was getting missed by the walker
1385       // when interface 'I' was the starting point.
1386       // %%% Until this is fixed more systematically, bail out.
1387       // (Old CHA had the same limitation.)
1388       return context_type;
1389     }
1390     if (nof_impls > 0) {
1391       Klass* impl = InstanceKlass::cast(context_type)->implementor();
1392       assert(impl != NULL, "just checking");
1393       // If impl is the same as the context_type, then more than one
      // implementor has been seen.  No exact info in this case.
1395       if (impl == context_type) {
1396         return context_type;  // report an inexact witness to this sad affair
1397       }
1398       if (do_counts)
1399         { NOT_PRODUCT(deps_find_witness_steps++); }
1400       if (is_participant(impl)) {
1401         if (!participants_hide_witnesses) {
1402           ADD_SUBCLASS_CHAIN(impl);
1403         }
1404       } else if (is_witness(impl) && !ignore_witness(impl)) {
1405         return impl;
1406       } else {
1407         ADD_SUBCLASS_CHAIN(impl);
1408       }
1409     }
1410   }
1411 
1412   // Recursively process each non-trivial sibling chain.
1413   while (chaini > 0) {
1414     Klass* chain = chains[--chaini];
1415     for (Klass* sub = chain; sub != NULL; sub = sub->next_sibling()) {
1416       if (do_counts) { NOT_PRODUCT(deps_find_witness_steps++); }
1417       if (is_participant(sub)) {
1418         if (participants_hide_witnesses)  continue;
1419         // else fall through to process this guy's subclasses
1420       } else if (is_witness(sub) && !ignore_witness(sub)) {
1421         return sub;
1422       }
1423       if (chaini < (VerifyDependencies? 2: CHAINMAX)) {
1424         // Fast path.  (Partially disabled if VerifyDependencies.)
1425         ADD_SUBCLASS_CHAIN(sub);
1426       } else {
1427         // Worklist overflow.  Do a recursive call.  Should be rare.
1428         // The recursive call will have its own worklist, of course.
1429         // (Note that sub has already been tested, so that there is
1430         // no need for the recursive call to re-test.  That's handy,
1431         // since the recursive call sees sub as the context_type.)
1432         if (do_counts) { NOT_PRODUCT(deps_find_witness_recursions++); }
1433         Klass* witness = find_witness_anywhere(sub,
1434                                                  participants_hide_witnesses,
1435                                                  /*top_level_call=*/ false);
1436         if (witness != NULL)  return witness;
1437       }
1438     }
1439   }
1440 
1441   // No witness found.  The dependency remains unbroken.
1442   return NULL;
1443 #undef ADD_SUBCLASS_CHAIN
1444 }
1445 
1446 
1447 bool Dependencies::is_concrete_klass(Klass* k) {
1448   if (k->is_abstract())  return false;
1449   // %%% We could treat classes which are concrete but
1450   // have not yet been instantiated as virtually abstract.
1451   // This would require a deoptimization barrier on first instantiation.
1452   //if (k->is_not_instantiated())  return false;
1453   return true;
1454 }
1455 
1456 bool Dependencies::is_concrete_method(Method* m, Klass* k) {
1457   // NULL is not a concrete method,
1458   // statics are irrelevant to virtual call sites,
1459   // abstract methods are not concrete,
1460   // overpass (error) methods are not concrete if k is abstract
1461   //
1462   // Note: "true" is the conservative answer.  The overpass clause is
1463   // false when k == NULL, so we return true whenever the answer
1464   // depends on the overpass clause.
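       //
       // For illustration (derived directly from the expression below):
       //   m == NULL                           -> not concrete
       //   m is static or abstract             -> not concrete
       //   m is an overpass and k is abstract  -> not concrete
       //   m is an overpass and k == NULL      -> concrete (conservative)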
1465   return !(m == NULL || m->is_static() || m->is_abstract() ||
1466            (m->is_overpass() && k != NULL && k->is_abstract()));
1467 }
1468 
1469 
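     // Depth-first walk of k's subclass tree: returns NULL immediately for
     // interfaces, otherwise returns the first klass (including k itself)
     // whose has_finalizer() flag is set, or NULL if there is none.
     // Illustration (assuming the flag is set when a class overrides
     // finalize()): for a hierarchy A > B where only B overrides finalize(),
     // starting the search at A returns B.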
1470 Klass* Dependencies::find_finalizable_subclass(Klass* k) {
1471   if (k->is_interface())  return NULL;
1472   if (k->has_finalizer()) return k;
1473   k = k->subklass();
1474   while (k != NULL) {
1475     Klass* result = find_finalizable_subclass(k);
1476     if (result != NULL) return result;
1477     k = k->next_sibling();
1478   }
1479   return NULL;
1480 }
1481 
1482 
1483 bool Dependencies::is_concrete_klass(ciInstanceKlass* k) {
1484   if (k->is_abstract())  return false;
1485   // We could also return false if k does not yet appear to be
1486   // instantiated, if the VM version supports this distinction also.
1487   //if (k->is_not_instantiated())  return false;
1488   return true;
1489 }
1490 
1491 bool Dependencies::has_finalizable_subclass(ciInstanceKlass* k) {
1492   return k->has_finalizable_subclass();
1493 }
1494 
1495 
1496 // Any use of the contents (bytecodes) of a method must be
1497 // marked by an "evol_method" dependency, if those contents
1498 // can change.  (Note: A method is always dependent on itself.)
1499 Klass* Dependencies::check_evol_method(Method* m) {
1500   assert(must_be_in_vm(), "raw oops here");
1501   // Did somebody do a JVMTI RedefineClasses while our backs were turned?
1502   // Or is there now a breakpoint?
1503   // (Assumes compiled code cannot handle bkpts; change if UseFastBreakpoints.)
1504   if (m->is_old()
1505       || m->number_of_breakpoints() > 0) {
1506     return m->method_holder();
1507   } else {
1508     return NULL;
1509   }
1510 }
1511 
1512 // This is a strong assertion:  It is that the given type
1513 // has no subtypes whatever.  It is most useful for
1514 // optimizing checks on reflected types or on array types.
1515 // (Checks on types which are derived from real instances
1516 // can be optimized more strongly than this, because we
1517 // know that the checked type comes from a concrete type,
1518 // and therefore we can disregard abstract types.)
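     // (Illustration: a final class with no loaded subclasses passes the
     // check and NULL is returned; if a subclass, or an implementor of an
     // interface, already exists, that type is returned as the witness.)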
1519 Klass* Dependencies::check_leaf_type(Klass* ctxk) {
1520   assert(must_be_in_vm(), "raw oops here");
1521   assert_locked_or_safepoint(Compile_lock);
1522   InstanceKlass* ctx = InstanceKlass::cast(ctxk);
1523   Klass* sub = ctx->subklass();
1524   if (sub != NULL) {
1525     return sub;
1526   } else if (ctx->nof_implementors() != 0) {
1527     // if it is an interface, it must be unimplemented
1528     // (if it is not an interface, nof_implementors is always zero)
1529     Klass* impl = ctx->implementor();
1530     assert(impl != NULL, "must be set");
1531     return impl;
1532   } else {
1533     return NULL;
1534   }
1535 }
1536 
1537 // Test the assertion that conck is the only concrete subtype* of ctxk.
1538 // The type conck itself is allowed to have further concrete subtypes.
1539 // This allows the compiler to narrow occurrences of ctxk by conck,
1540 // when dealing with the types of actual instances.
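     // (Illustration: while conck remains the only concrete subtype of the
     // abstract ctxk, this returns NULL; if some other concrete type that is
     // not a subtype of conck appears under ctxk, it is returned as the
     // witness, breaking the dependency.)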
1541 Klass* Dependencies::check_abstract_with_unique_concrete_subtype(Klass* ctxk,
1542                                                                    Klass* conck,
1543                                                                    KlassDepChange* changes) {
1544   ClassHierarchyWalker wf(conck);
1545   return wf.find_witness_subtype(ctxk, changes);
1546 }
1547 
1548 // If a non-concrete class has no concrete subtypes, it is not (yet)
1549 // instantiatable.  This can allow the compiler to make some paths go
1550 // dead, if they are gated by a test of the type.
1551 Klass* Dependencies::check_abstract_with_no_concrete_subtype(Klass* ctxk,
1552                                                                KlassDepChange* changes) {
1553   // Find any concrete subtype, with no participants:
1554   ClassHierarchyWalker wf;
1555   return wf.find_witness_subtype(ctxk, changes);
1556 }
1557 
1558 
1559 // If a concrete class has no concrete subtypes, it can always be
1560 // exactly typed.  This allows the use of a cheaper type test.
1561 Klass* Dependencies::check_concrete_with_no_concrete_subtype(Klass* ctxk,
1562                                                                KlassDepChange* changes) {
1563   // Find any concrete subtype, with only the ctxk as participant:
1564   ClassHierarchyWalker wf(ctxk);
1565   return wf.find_witness_subtype(ctxk, changes);
1566 }
1567 
1568 
1569 // Find the unique concrete proper subtype of ctxk, or NULL if there
1570 // is more than one concrete proper subtype.  If there are no concrete
1571 // proper subtypes, return ctxk itself, whether it is concrete or not.
1572 // The returned subtype is allowed to have further concrete subtypes.
1573 // That is, return CC1 for CX > CC1 > CC2, but NULL for CX > { CC1, CC2 }.
1574 Klass* Dependencies::find_unique_concrete_subtype(Klass* ctxk) {
1575   ClassHierarchyWalker wf(ctxk);   // Ignore ctxk when walking.
1576   wf.record_witnesses(1);          // Record one other witness when walking.
1577   Klass* wit = wf.find_witness_subtype(ctxk);
1578   if (wit != NULL)  return NULL;   // Too many witnesses.
1579   Klass* conck = wf.participant(0);
1580   if (conck == NULL) {
1581 #ifndef PRODUCT
1582     // Make sure the dependency mechanism will pass this discovery:
1583     if (VerifyDependencies) {
1584       // Turn off dependency tracing while actually testing deps.
1585       FlagSetting fs(TraceDependencies, false);
1586       if (!Dependencies::is_concrete_klass(ctxk)) {
1587         guarantee(NULL ==
1588                   (void *)check_abstract_with_no_concrete_subtype(ctxk),
1589                   "verify dep.");
1590       } else {
1591         guarantee(NULL ==
1592                   (void *)check_concrete_with_no_concrete_subtype(ctxk),
1593                   "verify dep.");
1594       }
1595     }
1596 #endif //PRODUCT
1597     return ctxk;                   // Return ctxk as a flag for "no subtypes".
1598   } else {
1599 #ifndef PRODUCT
1600     // Make sure the dependency mechanism will pass this discovery:
1601     if (VerifyDependencies) {
1602       // Turn off dependency tracing while actually testing deps.
1603       FlagSetting fs(TraceDependencies, false);
1604       if (!Dependencies::is_concrete_klass(ctxk)) {
1605         guarantee(NULL == (void *)
1606                   check_abstract_with_unique_concrete_subtype(ctxk, conck),
1607                   "verify dep.");
1608       }
1609     }
1610 #endif //PRODUCT
1611     return conck;
1612   }
1613 }
1614 
1615 // Test the assertion that the k[12] are the only concrete subtypes of ctxk,
1616 // except possibly for further subtypes of k[12] themselves.
1617 // The context type must be abstract.  The types k1 and k2 are themselves
1618 // allowed to have further concrete subtypes.
1619 Klass* Dependencies::check_abstract_with_exclusive_concrete_subtypes(
1620                                                 Klass* ctxk,
1621                                                 Klass* k1,
1622                                                 Klass* k2,
1623                                                 KlassDepChange* changes) {
1624   ClassHierarchyWalker wf;
1625   wf.add_participant(k1);
1626   wf.add_participant(k2);
1627   return wf.find_witness_subtype(ctxk, changes);
1628 }
1629 
1630 // Search ctxk for concrete implementations.  If there are klen or fewer,
1631 // pack them into the given array and return the number.
1632 // Otherwise, return -1, meaning the given array would overflow.
1633 // (Note that a return of 0 means there are exactly no concrete subtypes.)
1634 // In this search, if ctxk is concrete, it will be reported alone.
1635 // For any type CC reported, no proper subtypes of CC will be reported.
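     // (Usage sketch, illustrative only: a caller typically supplies a small
     // on-stack array, e.g.
     //     Klass* karray[2];
     //     int n = Dependencies::find_exclusive_concrete_subtypes(ctxk, 2, karray);
     // where n == -1 means too many concrete subtypes to record, n == 0 means
     // none at all, and otherwise karray[0..n-1] holds the exclusive set.)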
1636 int Dependencies::find_exclusive_concrete_subtypes(Klass* ctxk,
1637                                                    int klen,
1638                                                    Klass* karray[]) {
1639   ClassHierarchyWalker wf;
1640   wf.record_witnesses(klen);
1641   Klass* wit = wf.find_witness_subtype(ctxk);
1642   if (wit != NULL)  return -1;  // Too many witnesses.
1643   int num = wf.num_participants();
1644   assert(num <= klen, "oob");
1645   // Pack the result array with the good news.
1646   for (int i = 0; i < num; i++)
1647     karray[i] = wf.participant(i);
1648 #ifndef PRODUCT
1649   // Make sure the dependency mechanism will pass this discovery:
1650   if (VerifyDependencies) {
1651     // Turn off dependency tracing while actually testing deps.
1652     FlagSetting fs(TraceDependencies, false);
1653     switch (Dependencies::is_concrete_klass(ctxk)? -1: num) {
1654     case -1: // ctxk was itself concrete
1655       guarantee(num == 1 && karray[0] == ctxk, "verify dep.");
1656       break;
1657     case 0:
1658       guarantee(NULL == (void *)check_abstract_with_no_concrete_subtype(ctxk),
1659                 "verify dep.");
1660       break;
1661     case 1:
1662       guarantee(NULL == (void *)
1663                 check_abstract_with_unique_concrete_subtype(ctxk, karray[0]),
1664                 "verify dep.");
1665       break;
1666     case 2:
1667       guarantee(NULL == (void *)
1668                 check_abstract_with_exclusive_concrete_subtypes(ctxk,
1669                                                                 karray[0],
1670                                                                 karray[1]),
1671                 "verify dep.");
1672       break;
1673     default:
1674       ShouldNotReachHere();  // klen > 2 not yet supported
1675     }
1676   }
1677 #endif //PRODUCT
1678   return num;
1679 }
1680 
1681 // If a class (or interface) has a unique concrete method uniqm, return NULL.
1682 // Otherwise, return a class that contains an interfering method.
1683 Klass* Dependencies::check_unique_concrete_method(Klass* ctxk, Method* uniqm,
1684                                                     KlassDepChange* changes) {
1685   // Here is a missing optimization:  If uniqm->is_final(),
1686   // we don't really need to search beneath it for overrides.
1687   // This is probably not important, since we don't use dependencies
1688   // to track final methods.  (They can't be "definalized".)
1689   ClassHierarchyWalker wf(uniqm->method_holder(), uniqm);
1690   return wf.find_witness_definer(ctxk, changes);
1691 }
1692 
1693 // Find the set of all non-abstract methods under ctxk that match m.
1694 // (The method m must be defined or inherited in ctxk.)
1695 // Include m itself in the set, unless it is abstract.
1696 // If this set has exactly one element, return that element.
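     // (Illustration: if ctxk inherits a concrete m and nothing at or below
     // ctxk overrides it, m itself is returned; if m is abstract and exactly
     // one concrete implementation is visible under ctxk, that implementation
     // is returned; if two distinct concrete implementations are visible from
     // ctxk, counting an inherited concrete m, NULL is returned.)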
1697 Method* Dependencies::find_unique_concrete_method(Klass* ctxk, Method* m) {
1698   // Return NULL if m is marked old; must have been a redefined method.
1699   if (m->is_old()) {
1700     return NULL;
1701   }
1702   ClassHierarchyWalker wf(m);
1703   assert(wf.check_method_context(ctxk, m), "proper context");
1704   wf.record_witnesses(1);
1705   Klass* wit = wf.find_witness_definer(ctxk);
1706   if (wit != NULL)  return NULL;  // Too many witnesses.
1707   Method* fm = wf.found_method(0);  // Will be NULL if num_parts == 0.
1708   if (Dependencies::is_concrete_method(m, ctxk)) {
1709     if (fm == NULL) {
1710       // It turns out that m was always the only implementation.
1711       fm = m;
1712     } else if (fm != m) {
1713       // Two conflicting implementations after all.
1714       // (This can happen if m is inherited into ctxk and fm overrides it.)
1715       return NULL;
1716     }
1717   }
1718 #ifndef PRODUCT
1719   // Make sure the dependency mechanism will pass this discovery:
1720   if (VerifyDependencies && fm != NULL) {
1721     guarantee(NULL == (void *)check_unique_concrete_method(ctxk, fm),
1722               "verify dep.");
1723   }
1724 #endif //PRODUCT
1725   return fm;
1726 }
1727 
1728 Klass* Dependencies::check_exclusive_concrete_methods(Klass* ctxk,
1729                                                         Method* m1,
1730                                                         Method* m2,
1731                                                         KlassDepChange* changes) {
1732   ClassHierarchyWalker wf(m1);
1733   wf.add_participant(m1->method_holder());
1734   wf.add_participant(m2->method_holder());
1735   return wf.find_witness_definer(ctxk, changes);
1736 }
1737 
1738 Klass* Dependencies::check_has_no_finalizable_subclasses(Klass* ctxk, KlassDepChange* changes) {
1739   Klass* search_at = ctxk;
1740   if (changes != NULL)
1741     search_at = changes->new_type(); // just look at the newly loaded type
1742   return find_finalizable_subclass(search_at);
1743 }
1744 
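     // A call_site_target_value dependency asserts that the given CallSite
     // still has the given MethodHandle as its target (the compiler may have
     // inlined through that target).  A change of target arrives here as a
     // CallSiteDepChange so that dependent code can be invalidated.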
1745 Klass* Dependencies::check_call_site_target_value(oop call_site, oop method_handle, CallSiteDepChange* changes) {
1746   assert(!oopDesc::is_null(call_site), "sanity");
1747   assert(!oopDesc::is_null(method_handle), "sanity");
1748   assert(call_site->is_a(SystemDictionary::CallSite_klass()),     "sanity");
1749 
1750   if (changes == NULL) {
1751     // Validate all CallSites
1752     if (java_lang_invoke_CallSite::target(call_site) != method_handle)
1753       return call_site->klass();  // assertion failed
1754   } else {
1755     // Validate the given CallSite
1756     if (call_site == changes->call_site() && java_lang_invoke_CallSite::target(call_site) != changes->method_handle()) {
1757       assert(method_handle != changes->method_handle(), "must be");
1758       return call_site->klass();  // assertion failed
1759     }
1760   }
1761   return NULL;  // assertion still valid
1762 }
1763 
1764 void Dependencies::DepStream::trace_and_log_witness(Klass* witness) {
1765   if (witness != NULL) {
1766     if (TraceDependencies) {
1767       print_dependency(witness, /*verbose=*/ true);
1768     }
1769     // The following is a no-op unless logging is enabled:
1770     log_dependency(witness);
1771   }
1772 }
1773 
1774 
1775 Klass* Dependencies::DepStream::check_klass_dependency(KlassDepChange* changes) {
1776   assert_locked_or_safepoint(Compile_lock);
1777   Dependencies::check_valid_dependency_type(type());
1778 
1779   Klass* witness = NULL;
1780   switch (type()) {
1781   case evol_method:
1782     witness = check_evol_method(method_argument(0));
1783     break;
1784   case leaf_type:
1785     witness = check_leaf_type(context_type());
1786     break;
1787   case abstract_with_unique_concrete_subtype:
1788     witness = check_abstract_with_unique_concrete_subtype(context_type(), type_argument(1), changes);
1789     break;
1790   case abstract_with_no_concrete_subtype:
1791     witness = check_abstract_with_no_concrete_subtype(context_type(), changes);
1792     break;
1793   case concrete_with_no_concrete_subtype:
1794     witness = check_concrete_with_no_concrete_subtype(context_type(), changes);
1795     break;
1796   case unique_concrete_method:
1797     witness = check_unique_concrete_method(context_type(), method_argument(1), changes);
1798     break;
1799   case abstract_with_exclusive_concrete_subtypes_2:
1800     witness = check_abstract_with_exclusive_concrete_subtypes(context_type(), type_argument(1), type_argument(2), changes);
1801     break;
1802   case exclusive_concrete_methods_2:
1803     witness = check_exclusive_concrete_methods(context_type(), method_argument(1), method_argument(2), changes);
1804     break;
1805   case no_finalizable_subclasses:
1806     witness = check_has_no_finalizable_subclasses(context_type(), changes);
1807     break;
1808   default:
1809     witness = NULL;
1810     break;
1811   }
1812   trace_and_log_witness(witness);
1813   return witness;
1814 }
1815 
1816 
1817 Klass* Dependencies::DepStream::check_call_site_dependency(CallSiteDepChange* changes) {
1818   assert_locked_or_safepoint(Compile_lock);
1819   Dependencies::check_valid_dependency_type(type());
1820 
1821   Klass* witness = NULL;
1822   switch (type()) {
1823   case call_site_target_value:
1824     witness = check_call_site_target_value(argument_oop(0), argument_oop(1), changes);
1825     break;
1826   default:
1827     witness = NULL;
1828     break;
1829   }
1830   trace_and_log_witness(witness);
1831   return witness;
1832 }
1833 
1834 
1835 Klass* Dependencies::DepStream::spot_check_dependency_at(DepChange& changes) {
1836   // Handle klass dependency
1837   if (changes.is_klass_change() && changes.as_klass_change()->involves_context(context_type()))
1838     return check_klass_dependency(changes.as_klass_change());
1839 
1840   // Handle CallSite dependency
1841   if (changes.is_call_site_change())
1842     return check_call_site_dependency(changes.as_call_site_change());
1843 
1844   // irrelevant dependency; skip it
1845   return NULL;
1846 }
1847 
1848 
1849 void DepChange::print() {
1850   int nsup = 0, nint = 0;
1851   for (ContextStream str(*this); str.next(); ) {
1852     Klass* k = str.klass();
1853     switch (str.change_type()) {
1854     case Change_new_type:
1855       tty->print_cr("  dependee = %s", k->external_name());
1856       break;
1857     case Change_new_sub:
1858       if (!WizardMode) {
1859         ++nsup;
1860       } else {
1861         tty->print_cr("  context super = %s", k->external_name());
1862       }
1863       break;
1864     case Change_new_impl:
1865       if (!WizardMode) {
1866         ++nint;
1867       } else {
1868         tty->print_cr("  context interface = %s", k->external_name());
1869       }
1870       break;
1871     default:
1872       break;
1873     }
1874   }
1875   if (nsup + nint != 0) {
1876     tty->print_cr("  context supers = %d, interfaces = %d", nsup, nint);
1877   }
1878 }
1879 
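     // A ContextStream enumerates, in order: the new type itself, then each
     // of its superclasses, and finally each of its transitive interfaces.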
1880 void DepChange::ContextStream::start() {
1881   Klass* new_type = _changes.is_klass_change() ? _changes.as_klass_change()->new_type() : (Klass*) NULL;
1882   _change_type = (new_type == NULL ? NO_CHANGE : Start_Klass);
1883   _klass = new_type;
1884   _ti_base = NULL;
1885   _ti_index = 0;
1886   _ti_limit = 0;
1887 }
1888 
1889 bool DepChange::ContextStream::next() {
1890   switch (_change_type) {
1891   case Start_Klass:             // initial state; _klass is the new type
1892     _ti_base = InstanceKlass::cast(_klass)->transitive_interfaces();
1893     _ti_index = 0;
1894     _change_type = Change_new_type;
1895     return true;
1896   case Change_new_type:
1897     // fall through:
1898     _change_type = Change_new_sub;
1899   case Change_new_sub:
1900     // 6598190: brackets workaround Sun Studio C++ compiler bug 6629277
1901     {
1902       _klass = _klass->super();
1903       if (_klass != NULL) {
1904         return true;
1905       }
1906     }
1907     // else set up _ti_limit and fall through:
1908     _ti_limit = (_ti_base == NULL) ? 0 : _ti_base->length();
1909     _change_type = Change_new_impl;
1910   case Change_new_impl:
1911     if (_ti_index < _ti_limit) {
1912       _klass = _ti_base->at(_ti_index++);
1913       return true;
1914     }
1915     // fall through:
1916     _change_type = NO_CHANGE;  // iterator is exhausted
1917   case NO_CHANGE:
1918     break;
1919   default:
1920     ShouldNotReachHere();
1921   }
1922   return false;
1923 }
1924 
1925 void KlassDepChange::initialize() {
1926   // entire transaction must be under this lock:
1927   assert_lock_strong(Compile_lock);
1928 
1929   // Mark the dependee (the new type) and all of its superclasses,
1930   // as well as its transitive interfaces.
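       // (These marks are consulted by KlassDepChange::involves_context()
       // below as a cheap stand-in for a subtype check against the new type.)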
1931   for (ContextStream str(*this); str.next(); ) {
1932     Klass* d = str.klass();
1933     assert(!InstanceKlass::cast(d)->is_marked_dependent(), "checking");
1934     InstanceKlass::cast(d)->set_is_marked_dependent(true);
1935   }
1936 }
1937 
1938 KlassDepChange::~KlassDepChange() {
1939   // Unmark the dependee (the new type) and all of its superclasses,
1940   // as well as its transitive interfaces.
1941   for (ContextStream str(*this); str.next(); ) {
1942     Klass* d = str.klass();
1943     InstanceKlass::cast(d)->set_is_marked_dependent(false);
1944   }
1945 }
1946 
1947 bool KlassDepChange::involves_context(Klass* k) {
1948   if (k == NULL || !k->is_instance_klass()) {
1949     return false;
1950   }
1951   InstanceKlass* ik = InstanceKlass::cast(k);
1952   bool is_contained = ik->is_marked_dependent();
1953   assert(is_contained == new_type()->is_subtype_of(k),
1954          "correct marking of potential context types");
1955   return is_contained;
1956 }
1957 
1958 #ifndef PRODUCT
1959 void Dependencies::print_statistics() {
1960   if (deps_find_witness_print != 0) {
1961     // Call one final time, to flush out the data.
1962     deps_find_witness_print = -1;
1963     count_find_witness_calls();
1964   }
1965 }
1966 #endif
1967 
1968 CallSiteDepChange::CallSiteDepChange(Handle call_site, Handle method_handle) :
1969   _call_site(call_site),
1970   _method_handle(method_handle) {
1971   assert(_call_site()->is_a(SystemDictionary::CallSite_klass()), "must be");
1972   assert(_method_handle.is_null() || _method_handle()->is_a(SystemDictionary::MethodHandle_klass()), "must be");
1973 }