< prev index next >

src/hotspot/share/runtime/deoptimization.cpp

Print this page
rev 56101 : 8227745: Enable Escape Analysis for better performance when debugging
Reviewed-by: ???


  39 #include "memory/resourceArea.hpp"
  40 #include "memory/universe.hpp"
  41 #include "oops/constantPool.hpp"
  42 #include "oops/method.hpp"
  43 #include "oops/objArrayKlass.hpp"
  44 #include "oops/objArrayOop.inline.hpp"
  45 #include "oops/oop.inline.hpp"
  46 #include "oops/fieldStreams.hpp"
  47 #include "oops/typeArrayOop.inline.hpp"
  48 #include "oops/verifyOopClosure.hpp"
  49 #include "prims/jvmtiThreadState.hpp"
  50 #include "runtime/biasedLocking.hpp"
  51 #include "runtime/compilationPolicy.hpp"
  52 #include "runtime/deoptimization.hpp"
  53 #include "runtime/fieldDescriptor.hpp"
  54 #include "runtime/fieldDescriptor.inline.hpp"
  55 #include "runtime/frame.inline.hpp"
  56 #include "runtime/jniHandles.inline.hpp"
  57 #include "runtime/handles.inline.hpp"
  58 #include "runtime/interfaceSupport.inline.hpp"

  59 #include "runtime/safepointVerifiers.hpp"
  60 #include "runtime/sharedRuntime.hpp"
  61 #include "runtime/signature.hpp"
  62 #include "runtime/stubRoutines.hpp"
  63 #include "runtime/thread.hpp"
  64 #include "runtime/threadSMR.hpp"
  65 #include "runtime/vframe.hpp"
  66 #include "runtime/vframeArray.hpp"
  67 #include "runtime/vframe_hp.hpp"
  68 #include "utilities/events.hpp"
  69 #include "utilities/preserveException.hpp"
  70 #include "utilities/xmlstream.hpp"
  71 
  72 
// Global flag presumably toggled by DeoptimizationMarker instances while a
// deoptimization is in progress (class definition not visible in this chunk
// — confirm against deoptimization.hpp).
bool DeoptimizationMarker::_is_active = false;
  74 
  75 Deoptimization::UnrollBlock::UnrollBlock(int  size_of_deoptimized_frame,
  76                                          int  caller_adjustment,
  77                                          int  caller_actual_parameters,
  78                                          int  number_of_frames,


 141 // of previously eliminated objects occurs in realloc_objects, which is
 142 // called from the method fetch_unroll_info_helper below.
// Entry point from the deoptimization blob: records that this thread is in
// the deopt handler, then delegates to fetch_unroll_info_helper() to build
// the UnrollBlock describing the interpreter frames to create.
JRT_BLOCK_ENTRY(Deoptimization::UnrollBlock*, Deoptimization::fetch_unroll_info(JavaThread* thread, int exec_mode))
  // It is actually ok to allocate handles in a leaf method. It causes no safepoints,
  // but makes the entry a little slower. There is however a little dance we have to
  // do in debug mode to get around the NoHandleMark code in the JRT_LEAF macro

  // fetch_unroll_info() is called at the beginning of the deoptimization
  // handler. Note this fact before we start generating temporary frames
  // that can confuse an asynchronous stack walker. This counter is
  // decremented at the end of unpack_frames().
  if (TraceDeoptimization) {
    tty->print_cr("Deoptimizing thread " INTPTR_FORMAT, p2i(thread));
  }
  thread->inc_in_deopt_handler();

  return fetch_unroll_info_helper(thread, exec_mode);
JRT_END
 159 
 160 
 161 // This is factored, since it is both called from a JRT_LEAF (deoptimization) and a JRT_ENTRY (uncommon_trap)
 162 Deoptimization::UnrollBlock* Deoptimization::fetch_unroll_info_helper(JavaThread* thread, int exec_mode) {
 163 
 164   // Note: there is a safepoint safety issue here. No matter whether we enter
 165   // via vanilla deopt or uncommon trap we MUST NOT stop at a safepoint once
 166   // the vframeArray is created.
 167   //
 168 
 169   // Allocate our special deoptimization ResourceMark
 170   DeoptResourceMark* dmark = new DeoptResourceMark(thread);
 171   assert(thread->deopt_mark() == NULL, "Pending deopt!");
 172   thread->set_deopt_mark(dmark);
 173 
 174   frame stub_frame = thread->last_frame(); // Makes stack walkable as side effect
 175   RegisterMap map(thread, true);
 176   RegisterMap dummy_map(thread, false);
 177   // Now get the deoptee with a valid map
 178   frame deoptee = stub_frame.sender(&map);
 179   // Set the deoptee nmethod
 180   assert(thread->deopt_compiled_method() == NULL, "Pending deopt!");
 181   CompiledMethod* cm = deoptee.cb()->as_compiled_method_or_null();
 182   thread->set_deopt_compiled_method(cm);
 183 
 184   if (VerifyStack) {
 185     thread->validate_frame_layout();
 186   }
 187 
 188   // Create a growable array of VFrames where each VFrame represents an inlined
 189   // Java frame.  This storage is allocated with the usual system arena.
 190   assert(deoptee.is_compiled_frame(), "Wrong frame type");
 191   GrowableArray<compiledVFrame*>* chunk = new GrowableArray<compiledVFrame*>(10);
 192   vframe* vf = vframe::new_vframe(&deoptee, &map, thread);
 193   while (!vf->is_top()) {
 194     assert(vf->is_compiled_frame(), "Wrong frame type");
 195     chunk->push(compiledVFrame::cast(vf));
 196     vf = vf->sender();
 197   }
 198   assert(vf->is_compiled_frame(), "Wrong frame type");
 199   chunk->push(compiledVFrame::cast(vf));
 200 
 201   bool realloc_failures = false;

 202 
 203 #if COMPILER2_OR_JVMCI
 204   // Reallocate the non-escaping objects and restore their fields. Then
 205   // relock objects if synchronization on them was eliminated.
 206 #if !INCLUDE_JVMCI
 207   if (DoEscapeAnalysis || EliminateNestedLocks) {
 208     if (EliminateAllocations) {
 209 #endif // INCLUDE_JVMCI
 210       assert (chunk->at(0)->scope() != NULL,"expect only compiled java frames");
 211       GrowableArray<ScopeValue*>* objects = chunk->at(0)->scope()->objects();
 212 







 213       // The flag return_oop() indicates call sites which return oop
 214       // in compiled code. Such sites include java method calls,
 215       // runtime calls (for example, used to allocate new objects/arrays
 216       // on slow code path) and any other calls generated in compiled code.
 217       // It is not guaranteed that we can get such information here only
 218       // by analyzing bytecode in deoptimized frames. This is why this flag
 219       // is set during method compilation (see Compile::Process_OopMap_Node()).
 220       // If the previous frame was popped or if we are dispatching an exception,
 221       // we don't have an oop result.
 222       bool save_oop_result = chunk->at(0)->scope()->return_oop() && !thread->popframe_forcing_deopt_reexecution() && (exec_mode == Unpack_deopt);
 223       Handle return_value;
 224       if (save_oop_result) {
 225         // Reallocation may trigger GC. If deoptimization happened on return from
 226         // call which returns oop we need to save it since it is not in oopmap.
 227         oop result = deoptee.saved_oop_result(&map);
 228         assert(oopDesc::is_oop_or_null(result), "must be oop");
 229         return_value = Handle(thread, result);
 230         assert(Universe::heap()->is_in_or_null(result), "must be heap pointer");
 231         if (TraceDeoptimization) {
 232           ttyLocker ttyl;
 233           tty->print_cr("SAVED OOP RESULT " INTPTR_FORMAT " in thread " INTPTR_FORMAT, p2i(result), p2i(thread));
 234         }
 235       }
 236       if (objects != NULL) {
 237         JRT_BLOCK
 238           realloc_failures = realloc_objects(thread, &deoptee, &map, objects, THREAD);
 239         JRT_END












 240         bool skip_internal = (cm != NULL) && !cm->is_compiled_by_jvmci();
 241         reassign_fields(&deoptee, &map, objects, realloc_failures, skip_internal);

 242 #ifndef PRODUCT
 243         if (TraceDeoptimization) {
 244           ttyLocker ttyl;
 245           tty->print_cr("REALLOC OBJECTS in thread " INTPTR_FORMAT, p2i(thread));
 246           print_objects(objects, realloc_failures);
 247         }
 248 #endif
 249       }
 250       if (save_oop_result) {
 251         // Restore result.
 252         deoptee.set_saved_oop_result(&map, return_value());
 253       }
 254 #if !INCLUDE_JVMCI
 255     }
 256     if (EliminateLocks) {
 257 #endif // INCLUDE_JVMCI
 258 #ifndef PRODUCT
 259       bool first = true;
 260 #endif
 261       for (int i = 0; i < chunk->length(); i++) {
 262         compiledVFrame* cvf = chunk->at(i);
 263         assert (cvf->scope() != NULL,"expect only compiled java frames");
 264         GrowableArray<MonitorInfo*>* monitors = cvf->monitors();
 265         if (monitors->is_nonempty()) {
 266           relock_objects(monitors, thread, realloc_failures);

 267 #ifndef PRODUCT
 268           if (PrintDeoptimizationDetails) {
 269             ttyLocker ttyl;
 270             for (int j = 0; j < monitors->length(); j++) {
 271               MonitorInfo* mi = monitors->at(j);
 272               if (mi->eliminated()) {
 273                 if (first) {
 274                   first = false;
 275                   tty->print_cr("RELOCK OBJECTS in thread " INTPTR_FORMAT, p2i(thread));
 276                 }







 277                 if (mi->owner_is_scalar_replaced()) {
 278                   Klass* k = java_lang_Class::as_Klass(mi->owner_klass());
 279                   tty->print_cr("     failed reallocation for klass %s", k->external_name());
 280                 } else {
 281                   tty->print_cr("     object <" INTPTR_FORMAT "> locked", p2i(mi->owner()));
 282                 }
 283               }
 284             }
 285           }
 286 #endif // !PRODUCT
 287         }
 288       }
 289 #if !INCLUDE_JVMCI
 290     }
 291   }
 292 #endif // INCLUDE_JVMCI


























































 293 #endif // COMPILER2_OR_JVMCI
 294 
 295   ScopeDesc* trap_scope = chunk->at(0)->scope();
 296   Handle exceptionObject;
 297   if (trap_scope->rethrow_exception()) {
 298     if (PrintDeoptimizationDetails) {
 299       tty->print_cr("Exception to be rethrown in the interpreter for method %s::%s at bci %d", trap_scope->method()->method_holder()->name()->as_C_string(), trap_scope->method()->name()->as_C_string(), trap_scope->bci());
 300     }
 301     GrowableArray<ScopeValue*>* expressions = trap_scope->expressions();
 302     guarantee(expressions != NULL && expressions->length() > 0, "must have exception to throw");
 303     ScopeValue* topOfStack = expressions->top();
 304     exceptionObject = StackValue::create_stack_value(&deoptee, &map, topOfStack)->get_obj();
 305     guarantee(exceptionObject() != NULL, "exception oop can not be null");
 306   }
 307 
 308   // Ensure that no safepoint is taken after pointers have been stored
 309   // in fields of rematerialized objects.  If a safepoint occurs from here on
 310   // out the java state residing in the vframeArray will be missed.
 311   NoSafepointVerifier no_safepoint;
 312 


 323   // Now that the vframeArray has been created if we have any deferred local writes
 324   // added by jvmti then we can free up that structure as the data is now in the
 325   // vframeArray
 326 
 327   if (thread->deferred_locals() != NULL) {
 328     GrowableArray<jvmtiDeferredLocalVariableSet*>* list = thread->deferred_locals();
 329     int i = 0;
 330     do {
 331       // Because of inlining we could have multiple vframes for a single frame
 332       // and several of the vframes could have deferred writes. Find them all.
 333       if (list->at(i)->id() == array->original().id()) {
 334         jvmtiDeferredLocalVariableSet* dlv = list->at(i);
 335         list->remove_at(i);
 336         // individual jvmtiDeferredLocalVariableSet are CHeapObj's
 337         delete dlv;
 338       } else {
 339         i++;
 340       }
 341     } while ( i < list->length() );
 342     if (list->length() == 0) {
 343       thread->set_deferred_locals(NULL);
 344       // free the list and elements back to C heap.
 345       delete list;

 346     }
 347 
 348   }
 349 
 350   // Compute the caller frame based on the sender sp of stub_frame and stored frame sizes info.
 351   CodeBlob* cb = stub_frame.cb();
 352   // Verify we have the right vframeArray
 353   assert(cb->frame_size() >= 0, "Unexpected frame size");
 354   intptr_t* unpack_sp = stub_frame.sp() + cb->frame_size();
 355 
 356   // If the deopt call site is a MethodHandle invoke call site we have
 357   // to adjust the unpack_sp.
 358   nmethod* deoptee_nm = deoptee.cb()->as_nmethod_or_null();
 359   if (deoptee_nm != NULL && deoptee_nm->is_method_handle_return(deoptee.pc()))
 360     unpack_sp = deoptee.unextended_sp();
 361 
 362 #ifdef ASSERT
 363   assert(cb->is_deoptimization_stub() ||
 364          cb->is_uncommon_trap_stub() ||
 365          strcmp("Stub<DeoptimizationStub.deoptimizationHandler>", cb->name()) == 0 ||


 885         delete s;
 886       }
 887     }
 888     return _singleton;
 889   }
  // Look up the canonical Boolean instance for a raw stack-slot value.
  // The slot holds a jint-sized payload inside an intptr_t-sized word.
  oop lookup_raw(intptr_t raw_value) {
    // Have to cast to avoid little/big-endian problems.
    // NOTE(review): this reads the first 4 bytes of the word; verify the
    // store side uses the same convention on big-endian targets.
    jboolean value = (jboolean)*((jint*)&raw_value);
    return lookup(value);
  }
 895   oop lookup(jboolean value) {
 896     if (value != 0) {
 897       return JNIHandles::resolve_non_null(_true_cache);
 898     }
 899     return JNIHandles::resolve_non_null(_false_cache);
 900   }
 901 };
 902 
 903 BooleanBoxCache* BooleanBoxCache::_singleton = NULL;
 904 
 905 oop Deoptimization::get_cached_box(AutoBoxObjectValue* bv, frame* fr, RegisterMap* reg_map, TRAPS) {
 906    Klass* k = java_lang_Class::as_Klass(bv->klass()->as_ConstantOopReadValue()->value()());
 907    BasicType box_type = SystemDictionary::box_klass_type(k);
 908    if (box_type != T_OBJECT) {
 909      StackValue* value = StackValue::create_stack_value(fr, reg_map, bv->field_at(box_type == T_LONG ? 1 : 0));
 910      switch(box_type) {
 911        case T_INT:     return IntegerBoxCache::singleton(THREAD)->lookup_raw(value->get_int());
 912        case T_CHAR:    return CharacterBoxCache::singleton(THREAD)->lookup_raw(value->get_int());
 913        case T_SHORT:   return ShortBoxCache::singleton(THREAD)->lookup_raw(value->get_int());
 914        case T_BYTE:    return ByteBoxCache::singleton(THREAD)->lookup_raw(value->get_int());
 915        case T_BOOLEAN: return BooleanBoxCache::singleton(THREAD)->lookup_raw(value->get_int());
 916        case T_LONG:    return LongBoxCache::singleton(THREAD)->lookup_raw(value->get_int());
 917        default:;
 918      }
 919    }
 920    return NULL;
 921 }
 922 #endif // INCLUDE_JVMCI || INCLUDE_AOT
 923 
 924 #if COMPILER2_OR_JVMCI
 925 bool Deoptimization::realloc_objects(JavaThread* thread, frame* fr, RegisterMap* reg_map, GrowableArray<ScopeValue*>* objects, TRAPS) {
 926   Handle pending_exception(THREAD, thread->pending_exception());
 927   const char* exception_file = thread->exception_file();
 928   int exception_line = thread->exception_line();
 929   thread->clear_pending_exception();
 930 
 931   bool failures = false;
 932 
 933   for (int i = 0; i < objects->length(); i++) {
 934     assert(objects->at(i)->is_object(), "invalid debug information");
 935     ObjectValue* sv = (ObjectValue*) objects->at(i);
 936 
 937     Klass* k = java_lang_Class::as_Klass(sv->klass()->as_ConstantOopReadValue()->value()());
 938     oop obj = NULL;
 939 
 940     if (k->is_instance_klass()) {
 941 #if INCLUDE_JVMCI || INCLUDE_AOT
 942       CompiledMethod* cm = fr->cb()->as_compiled_method_or_null();
 943       if (cm->is_compiled_by_jvmci() && sv->is_auto_box()) {
 944         AutoBoxObjectValue* abv = (AutoBoxObjectValue*) sv;
 945         obj = get_cached_box(abv, fr, reg_map, THREAD);


 966     if (obj == NULL) {
 967       failures = true;
 968     }
 969 
 970     assert(sv->value().is_null(), "redundant reallocation");
 971     assert(obj != NULL || HAS_PENDING_EXCEPTION, "allocation should succeed or we should get an exception");
 972     CLEAR_PENDING_EXCEPTION;
 973     sv->set_value(obj);
 974   }
 975 
 976   if (failures) {
 977     THROW_OOP_(Universe::out_of_memory_error_realloc_objects(), failures);
 978   } else if (pending_exception.not_null()) {
 979     thread->set_pending_exception(pending_exception(), exception_file, exception_line);
 980   }
 981 
 982   return failures;
 983 }
 984 
 985 // restore elements of an eliminated type array
 986 void Deoptimization::reassign_type_array_elements(frame* fr, RegisterMap* reg_map, ObjectValue* sv, typeArrayOop obj, BasicType type) {
 987   int index = 0;
 988   intptr_t val;
 989 
 990   for (int i = 0; i < sv->field_size(); i++) {
 991     StackValue* value = StackValue::create_stack_value(fr, reg_map, sv->field_at(i));
 992     switch(type) {
 993     case T_LONG: case T_DOUBLE: {
 994       assert(value->type() == T_INT, "Agreement.");
 995       StackValue* low =
 996         StackValue::create_stack_value(fr, reg_map, sv->field_at(++i));
 997 #ifdef _LP64
 998       jlong res = (jlong)low->get_int();
 999 #else
1000 #ifdef SPARC
1001       // For SPARC we have to swap high and low words.
1002       jlong res = jlong_from((jint)low->get_int(), (jint)value->get_int());
1003 #else
1004       jlong res = jlong_from((jint)value->get_int(), (jint)low->get_int());
1005 #endif //SPARC
1006 #endif


1063       assert(value->type() == T_INT, "Agreement.");
1064       val = value->get_int();
1065       obj->byte_at_put(index, (jbyte)*((jint*)&val));
1066       break;
1067 
1068     case T_BOOLEAN:
1069       assert(value->type() == T_INT, "Agreement.");
1070       val = value->get_int();
1071       obj->bool_at_put(index, (jboolean)*((jint*)&val));
1072       break;
1073 
1074       default:
1075         ShouldNotReachHere();
1076     }
1077     index++;
1078   }
1079 }
1080 
1081 
1082 // restore fields of an eliminated object array
1083 void Deoptimization::reassign_object_array_elements(frame* fr, RegisterMap* reg_map, ObjectValue* sv, objArrayOop obj) {
1084   for (int i = 0; i < sv->field_size(); i++) {
1085     StackValue* value = StackValue::create_stack_value(fr, reg_map, sv->field_at(i));
1086     assert(value->type() == T_OBJECT, "object element expected");
1087     obj->obj_at_put(i, value->get_obj()());
1088   }
1089 }
1090 
1091 class ReassignedField {
1092 public:
1093   int _offset;
1094   BasicType _type;
1095 public:
1096   ReassignedField() {
1097     _offset = 0;
1098     _type = T_ILLEGAL;
1099   }
1100 };
1101 
1102 int compare(ReassignedField* left, ReassignedField* right) {
1103   return left->_offset - right->_offset;
1104 }
1105 
1106 // Restore fields of an eliminated instance object using the same field order
1107 // returned by HotSpotResolvedObjectTypeImpl.getInstanceFields(true)
1108 static int reassign_fields_by_klass(InstanceKlass* klass, frame* fr, RegisterMap* reg_map, ObjectValue* sv, int svIndex, oop obj, bool skip_internal) {
1109   if (klass->superklass() != NULL) {
1110     svIndex = reassign_fields_by_klass(klass->superklass(), fr, reg_map, sv, svIndex, obj, skip_internal);
1111   }
1112 
1113   GrowableArray<ReassignedField>* fields = new GrowableArray<ReassignedField>();
1114   for (AllFieldStream fs(klass); !fs.done(); fs.next()) {
1115     if (!fs.access_flags().is_static() && (!skip_internal || !fs.access_flags().is_internal())) {
1116       ReassignedField field;
1117       field._offset = fs.offset();
1118       field._type = FieldType::basic_type(fs.signature());
1119       fields->append(field);
1120     }
1121   }
1122   fields->sort(compare);
1123   for (int i = 0; i < fields->length(); i++) {
1124     intptr_t val;
1125     ScopeValue* scope_field = sv->field_at(svIndex);
1126     StackValue* value = StackValue::create_stack_value(fr, reg_map, scope_field);
1127     int offset = fields->at(i)._offset;
1128     BasicType type = fields->at(i)._type;


1196         assert(value->type() == T_INT, "Agreement.");
1197         val = value->get_int();
1198         obj->byte_field_put(offset, (jbyte)*((jint*)&val));
1199         break;
1200 
1201       case T_BOOLEAN:
1202         assert(value->type() == T_INT, "Agreement.");
1203         val = value->get_int();
1204         obj->bool_field_put(offset, (jboolean)*((jint*)&val));
1205         break;
1206 
1207       default:
1208         ShouldNotReachHere();
1209     }
1210     svIndex++;
1211   }
1212   return svIndex;
1213 }
1214 
1215 // restore fields of all eliminated objects and arrays
1216 void Deoptimization::reassign_fields(frame* fr, RegisterMap* reg_map, GrowableArray<ScopeValue*>* objects, bool realloc_failures, bool skip_internal) {
1217   for (int i = 0; i < objects->length(); i++) {
1218     ObjectValue* sv = (ObjectValue*) objects->at(i);
1219     Klass* k = java_lang_Class::as_Klass(sv->klass()->as_ConstantOopReadValue()->value()());
1220     Handle obj = sv->value();
1221     assert(obj.not_null() || realloc_failures, "reallocation was missed");
1222     if (PrintDeoptimizationDetails) {
1223       tty->print_cr("reassign fields for object of type %s!", k->name()->as_C_string());
1224     }
1225     if (obj.is_null()) {
1226       continue;
1227     }
1228 #if INCLUDE_JVMCI || INCLUDE_AOT
1229     // Don't reassign fields of boxes that came from a cache. Caches may be in CDS.
1230     if (sv->is_auto_box() && ((AutoBoxObjectValue*) sv)->is_cached()) {
1231       continue;
1232     }
1233 #endif // INCLUDE_JVMCI || INCLUDE_AOT
1234     if (k->is_instance_klass()) {
1235       InstanceKlass* ik = InstanceKlass::cast(k);
1236       reassign_fields_by_klass(ik, fr, reg_map, sv, 0, obj(), skip_internal);
1237     } else if (k->is_typeArray_klass()) {
1238       TypeArrayKlass* ak = TypeArrayKlass::cast(k);
1239       reassign_type_array_elements(fr, reg_map, sv, (typeArrayOop) obj(), ak->element_type());
1240     } else if (k->is_objArray_klass()) {
1241       reassign_object_array_elements(fr, reg_map, sv, (objArrayOop) obj());
1242     }
1243   }
1244 }
1245 
1246 
1247 // relock objects for which synchronization was eliminated
// Relock objects for which synchronization was eliminated by the compiler.
// For each eliminated monitor whose owner object actually exists (i.e. was
// not scalar-replaced, or was successfully reallocated), strip any bias from
// the mark word and re-acquire the lock on behalf of 'thread' so the
// interpreter frames observe consistent locking state.
void Deoptimization::relock_objects(GrowableArray<MonitorInfo*>* monitors, JavaThread* thread, bool realloc_failures) {
  for (int i = 0; i < monitors->length(); i++) {
    MonitorInfo* mon_info = monitors->at(i);
    if (mon_info->eliminated()) {
      // A scalar-replaced owner is only acceptable here if reallocation failed.
      assert(!mon_info->owner_is_scalar_replaced() || realloc_failures, "reallocation was missed");
      if (!mon_info->owner_is_scalar_replaced()) {
        Handle obj(thread, mon_info->owner());
        markWord mark = obj->mark();
        if (UseBiasedLocking && mark.has_bias_pattern()) {
          // New allocated objects may have the mark set to anonymously biased.
          // Also the deoptimized method may have called methods with synchronization
          // where the thread-local object is bias locked to the current thread.
          assert(mark.is_biased_anonymously() ||
                 mark.biased_locker() == thread, "should be locked to current thread");
          // Reset mark word to unbiased prototype.
          markWord unbiased_prototype = markWord::prototype().set_age(mark.age());
          obj->set_mark(unbiased_prototype);
        }
        // Acquire the previously eliminated lock now.
        BasicLock* lock = mon_info->lock();
        ObjectSynchronizer::enter(obj, lock, thread);
        assert(mon_info->owner()->is_locked(), "object must be locked now");
      }
    }
  }
}
1273 
1274 
1275 #ifndef PRODUCT
1276 // print information about reallocated objects
1277 void Deoptimization::print_objects(GrowableArray<ScopeValue*>* objects, bool realloc_failures) {
1278   fieldDescriptor fd;
1279 
1280   for (int i = 0; i < objects->length(); i++) {
1281     ObjectValue* sv = (ObjectValue*) objects->at(i);
1282     Klass* k = java_lang_Class::as_Klass(sv->klass()->as_ConstantOopReadValue()->value()());
1283     Handle obj = sv->value();
1284 
1285     tty->print("     object <" INTPTR_FORMAT "> of type ", p2i(sv->value()()));
1286     k->print_value();
1287     assert(obj.not_null() || realloc_failures, "reallocation was missed");
1288     if (obj.is_null()) {
1289       tty->print(" allocation failed");
1290     } else {
1291       tty->print(" allocated (%d bytes)", obj->size() * HeapWordSize);


2430             if (bc_case == BC_CASE_LIMIT && (int)bc == 0)
2431               bc = Bytecodes::_illegal;
2432             sprintf(name, "%s/%s/%s",
2433                     trap_reason_name(reason),
2434                     trap_action_name(action),
2435                     Bytecodes::is_defined(bc)? Bytecodes::name(bc): "other");
2436             juint r = counter >> LSB_BITS;
2437             tty->print_cr("  %40s: " UINT32_FORMAT " (%.1f%%)", name, r, (r * 100.0) / total);
2438             account -= r;
2439           }
2440         }
2441       }
2442     }
2443     if (account != 0) {
2444       PRINT_STAT_LINE("unaccounted", account);
2445     }
2446     #undef PRINT_STAT_LINE
2447     if (xtty != NULL)  xtty->tail("statistics");
2448   }
2449 }








































































































































































































































































































































































































2450 #else // COMPILER2_OR_JVMCI
2451 
2452 
2453 // Stubs for C1 only system.
// C1-only build stub: trap-state bookkeeping is a C2/JVMCI concept, so a
// trap state is never considered recompiled here.
bool Deoptimization::trap_state_is_recompiled(int trap_state) {
  return false;
}
2457 
// C1-only build stub: deopt reason names are not tracked; always answers a
// fixed string regardless of 'reason'.
const char* Deoptimization::trap_reason_name(int reason) {
  return "unknown";
}
2461 
// C1-only build stub: no deoptimization statistics are collected, so there
// is nothing to print.
void Deoptimization::print_statistics() {
  // no output
}
2465 
// C1-only build stub: method-data (MDO) updates from the interpreter are not
// performed in this configuration.
void
Deoptimization::update_method_data_from_interpreter(MethodData* trap_mdo, int trap_bci, int reason) {
  // no update
}


  39 #include "memory/resourceArea.hpp"
  40 #include "memory/universe.hpp"
  41 #include "oops/constantPool.hpp"
  42 #include "oops/method.hpp"
  43 #include "oops/objArrayKlass.hpp"
  44 #include "oops/objArrayOop.inline.hpp"
  45 #include "oops/oop.inline.hpp"
  46 #include "oops/fieldStreams.hpp"
  47 #include "oops/typeArrayOop.inline.hpp"
  48 #include "oops/verifyOopClosure.hpp"
  49 #include "prims/jvmtiThreadState.hpp"
  50 #include "runtime/biasedLocking.hpp"
  51 #include "runtime/compilationPolicy.hpp"
  52 #include "runtime/deoptimization.hpp"
  53 #include "runtime/fieldDescriptor.hpp"
  54 #include "runtime/fieldDescriptor.inline.hpp"
  55 #include "runtime/frame.inline.hpp"
  56 #include "runtime/jniHandles.inline.hpp"
  57 #include "runtime/handles.inline.hpp"
  58 #include "runtime/interfaceSupport.inline.hpp"
  59 #include "runtime/objectMonitor.inline.hpp"
  60 #include "runtime/safepointVerifiers.hpp"
  61 #include "runtime/sharedRuntime.hpp"
  62 #include "runtime/signature.hpp"
  63 #include "runtime/stubRoutines.hpp"
  64 #include "runtime/thread.hpp"
  65 #include "runtime/threadSMR.hpp"
  66 #include "runtime/vframe.hpp"
  67 #include "runtime/vframeArray.hpp"
  68 #include "runtime/vframe_hp.hpp"
  69 #include "utilities/events.hpp"
  70 #include "utilities/preserveException.hpp"
  71 #include "utilities/xmlstream.hpp"
  72 
  73 
  74 bool DeoptimizationMarker::_is_active = false;
  75 
  76 Deoptimization::UnrollBlock::UnrollBlock(int  size_of_deoptimized_frame,
  77                                          int  caller_adjustment,
  78                                          int  caller_actual_parameters,
  79                                          int  number_of_frames,


 142 // of previously eliminated objects occurs in realloc_objects, which is
 143 // called from the method fetch_unroll_info_helper below.
// Entry point from the deoptimization blob: records that this thread is in
// the deopt handler, then delegates to fetch_unroll_info_helper() to build
// the UnrollBlock describing the interpreter frames to create.
JRT_BLOCK_ENTRY(Deoptimization::UnrollBlock*, Deoptimization::fetch_unroll_info(JavaThread* thread, int exec_mode))
  // It is actually ok to allocate handles in a leaf method. It causes no safepoints,
  // but makes the entry a little slower. There is however a little dance we have to
  // do in debug mode to get around the NoHandleMark code in the JRT_LEAF macro

  // fetch_unroll_info() is called at the beginning of the deoptimization
  // handler. Note this fact before we start generating temporary frames
  // that can confuse an asynchronous stack walker. This counter is
  // decremented at the end of unpack_frames().
  if (TraceDeoptimization) {
    tty->print_cr("Deoptimizing thread " INTPTR_FORMAT, p2i(thread));
  }
  thread->inc_in_deopt_handler();

  return fetch_unroll_info_helper(thread, exec_mode);
JRT_END
 160 
 161 







 162 




 163 
 164 #if COMPILER2_OR_JVMCI
 165 // Deoptimize objects, that is reallocate and relock them. Either because the holding
 166 // compiled frame is being replaced by corresponding interpreter frames or because
 167 // they are about to escape through JVMTI (exec_mode == Unpack_none).
 168 bool Deoptimization::deoptimize_objects(JavaThread* thread, GrowableArray<compiledVFrame*>* chunk, bool& realloc_failures, int exec_mode) {
 169   bool deoptimized_objects = false;







 170 
 171   NOT_JVMCI(if (DoEscapeAnalysis || EliminateNestedLocks))
 172   {
 173     frame deoptee = chunk->at(0)->fr();
 174     JavaThread* deoptee_thread = chunk->at(0)->thread();
 175     const RegisterMap* map = chunk->at(0)->register_map();







 176 
 177     assert(!JVMTIEscapeBarrier::objs_are_deoptimized(deoptee_thread, deoptee.id()), "must relock just once");
 178     assert(exec_mode == Unpack_none || (deoptee_thread == thread), "a frame can only be deoptimized by the owner thread");
 179 
 180     NOT_JVMCI(if (EliminateAllocations))
 181     {





 182       assert (chunk->at(0)->scope() != NULL,"expect only compiled java frames");
 183       GrowableArray<ScopeValue*>* objects = chunk->at(0)->scope()->objects();
 184 
 185       if (objects != NULL) {
 186         if (exec_mode == Unpack_none) {
 187           assert(thread->thread_state() == _thread_in_vm, "assumption");
 188           Thread* THREAD = thread;
 189           // Clear pending OOM if reallocation fails and return false, i.e. no objects deoptimized.
 190           realloc_failures = realloc_objects(thread, &deoptee, map, objects, CHECK_AND_CLEAR_false);
 191         } else {
 192           // The flag return_oop() indicates call sites which return oop
 193           // in compiled code. Such sites include java method calls,
 194           // runtime calls (for example, used to allocate new objects/arrays
 195           // on slow code path) and any other calls generated in compiled code.
 196           // It is not guaranteed that we can get such information here only
 197           // by analyzing bytecode in deoptimized frames. This is why this flag
 198           // is set during method compilation (see Compile::Process_OopMap_Node()).
 199           // If the previous frame was popped or if we are dispatching an exception,
 200           // we don't have an oop result.
 201           bool save_oop_result = chunk->at(0)->scope()->return_oop() && !thread->popframe_forcing_deopt_reexecution() && (exec_mode == Unpack_deopt);
 202           Handle return_value;
 203           if (save_oop_result) {
 204             // Reallocation may trigger GC. If deoptimization happened on return from
 205             // call which returns oop we need to save it since it is not in oopmap.
 206             oop result = deoptee.saved_oop_result(map);
 207             assert(oopDesc::is_oop_or_null(result), "must be oop");
 208             return_value = Handle(thread, result);
 209             assert(Universe::heap()->is_in_or_null(result), "must be heap pointer");
 210             if (TraceDeoptimization) {
 211               ttyLocker ttyl;
 212               tty->print_cr("SAVED OOP RESULT " INTPTR_FORMAT " in thread " INTPTR_FORMAT, p2i(result), p2i(thread));
 213             }
 214           }

 215           JRT_BLOCK
 216             realloc_failures = realloc_objects(thread, &deoptee, map, objects, THREAD);
 217           JRT_END
 218           if (save_oop_result) {
 219             // Restore result.
 220             deoptee.set_saved_oop_result(map, return_value());
 221           }
 222           if (JVMTIEscapeBarrier::objs_are_deoptimized(deoptee_thread, deoptee.id())) {
          // A concurrent JVMTI agent thread stopped the current thread in the JRT_BLOCK above
          // and deoptimized its objects.
 225             realloc_failures = false; // ignore realloc failures if any occurred
 226             return false;             // current thread did not deoptimize objects
 227           }
 228         }
 229         CompiledMethod* cm = deoptee.cb()->as_compiled_method_or_null();
 230         bool skip_internal = (cm != NULL) && !cm->is_compiled_by_jvmci();
 231         reassign_fields(&deoptee, map, objects, realloc_failures, skip_internal);
 232         deoptimized_objects = true;
 233 #ifndef PRODUCT
 234         if (TraceDeoptimization) {
 235           ttyLocker ttyl;
 236           tty->print_cr("REALLOC OBJECTS in thread " INTPTR_FORMAT, p2i(deoptee_thread));
 237           print_objects(objects, realloc_failures);
 238         }
 239 #endif
 240       }





 241     }
 242     NOT_JVMCI(if (EliminateLocks))
 243     {
 244 #ifndef PRODUCT
 245       bool first = true;
 246 #endif
 247       for (int i = 0; i < chunk->length(); i++) {
 248         compiledVFrame* cvf = chunk->at(i);
 249         assert (cvf->scope() != NULL,"expect only compiled java frames");
 250         GrowableArray<MonitorInfo*>* monitors = cvf->monitors();
 251         if (monitors->is_nonempty()) {
 252           bool relocked = relock_objects(thread, monitors, deoptee_thread, &deoptee, exec_mode, realloc_failures);
 253           deoptimized_objects = deoptimized_objects || relocked;
 254 #ifndef PRODUCT
 255           if (PrintDeoptimizationDetails) {
 256             ttyLocker ttyl;
 257             for (int j = 0; j < monitors->length(); j++) {
 258               MonitorInfo* mi = monitors->at(j);
 259               if (mi->eliminated()) {
 260                 if (first) {
 261                   first = false;
 262                   tty->print_cr("RELOCK OBJECTS in thread " INTPTR_FORMAT, p2i(thread));
 263                 }
 264                 if (exec_mode == Deoptimization::Unpack_none) {
 265                   ObjectMonitor* monitor = deoptee_thread->current_waiting_monitor();
 266                   if (monitor != NULL && (oop)monitor->object() == mi->owner()) {
 267                     tty->print_cr("     object <" INTPTR_FORMAT "> DEFERRED relocking after wait", p2i(mi->owner()));
 268                     continue;
 269                   }
 270                 }
 271                 if (mi->owner_is_scalar_replaced()) {
 272                   Klass* k = java_lang_Class::as_Klass(mi->owner_klass());
 273                   tty->print_cr("     failed reallocation for klass %s", k->external_name());
 274                 } else {
 275                   tty->print_cr("     object <" INTPTR_FORMAT "> locked", p2i(mi->owner()));
 276                 }
 277               }
 278             }
 279           }
 280 #endif // !PRODUCT
 281         }
 282       }

 283     }
 284   }
 285   return deoptimized_objects;
 286 }
 287 #endif // COMPILER2_OR_JVMCI
 288 
 289 // This is factored, since it is both called from a JRT_LEAF (deoptimization) and a JRT_ENTRY (uncommon_trap)
 290 Deoptimization::UnrollBlock* Deoptimization::fetch_unroll_info_helper(JavaThread* thread, int exec_mode) {
 291 
 292   // Note: there is a safepoint safety issue here. No matter whether we enter
 293   // via vanilla deopt or uncommon trap we MUST NOT stop at a safepoint once
 294   // the vframeArray is created.
 295   //
 296 
 297   // Allocate our special deoptimization ResourceMark
 298   DeoptResourceMark* dmark = new DeoptResourceMark(thread);
 299   assert(thread->deopt_mark() == NULL, "Pending deopt!");
 300   thread->set_deopt_mark(dmark);
 301 
 302   frame stub_frame = thread->last_frame(); // Makes stack walkable as side effect
 303   RegisterMap map(thread, true);
 304   RegisterMap dummy_map(thread, false);
 305   // Now get the deoptee with a valid map
 306   frame deoptee = stub_frame.sender(&map);
 307   // Set the deoptee nmethod
 308   assert(thread->deopt_compiled_method() == NULL, "Pending deopt!");
 309   CompiledMethod* cm = deoptee.cb()->as_compiled_method_or_null();
 310   thread->set_deopt_compiled_method(cm);
 311 
 312   if (VerifyStack) {
 313     thread->validate_frame_layout();
 314   }
 315 
 316   // Create a growable array of VFrames where each VFrame represents an inlined
 317   // Java frame.  This storage is allocated with the usual system arena.
 318   assert(deoptee.is_compiled_frame(), "Wrong frame type");
 319   GrowableArray<compiledVFrame*>* chunk = new GrowableArray<compiledVFrame*>(10);
 320   vframe* vf = vframe::new_vframe(&deoptee, &map, thread);
 321   while (!vf->is_top()) {
 322     assert(vf->is_compiled_frame(), "Wrong frame type");
 323     chunk->push(compiledVFrame::cast(vf));
 324     vf = vf->sender();
 325   }
 326   assert(vf->is_compiled_frame(), "Wrong frame type");
 327   chunk->push(compiledVFrame::cast(vf));
 328 
 329   bool realloc_failures = false;
 330 
 331 #if COMPILER2_OR_JVMCI
 332   // Reallocate the non-escaping objects and restore their fields. Then
 333   // relock objects if synchronization on them was eliminated.
 334   if (!JVMTIEscapeBarrier::objs_are_deoptimized(thread, deoptee.id())) {
 335     // objects are not yet deoptimized, do it now
 336     deoptimize_objects(thread, chunk, realloc_failures, exec_mode);
 337   } else {
 338     // objects have been deoptimized already for JVMTI access
 339     if (TraceDeoptimization) {
 340       ttyLocker ttyl;
 341       tty->print_cr("ALREADY DEOPTIMIZED OBJECTS for thread " INTPTR_FORMAT, p2i(thread));
 342     }
 343   }
 344 #endif // COMPILER2_OR_JVMCI
 345 
 346   ScopeDesc* trap_scope = chunk->at(0)->scope();
 347   Handle exceptionObject;
 348   if (trap_scope->rethrow_exception()) {
 349     if (PrintDeoptimizationDetails) {
 350       tty->print_cr("Exception to be rethrown in the interpreter for method %s::%s at bci %d", trap_scope->method()->method_holder()->name()->as_C_string(), trap_scope->method()->name()->as_C_string(), trap_scope->bci());
 351     }
 352     GrowableArray<ScopeValue*>* expressions = trap_scope->expressions();
 353     guarantee(expressions != NULL && expressions->length() > 0, "must have exception to throw");
 354     ScopeValue* topOfStack = expressions->top();
 355     exceptionObject = StackValue::create_stack_value(&deoptee, &map, topOfStack)->get_obj();
 356     guarantee(exceptionObject() != NULL, "exception oop can not be null");
 357   }
 358 
 359   // Ensure that no safepoint is taken after pointers have been stored
 360   // in fields of rematerialized objects.  If a safepoint occurs from here on
 361   // out the java state residing in the vframeArray will be missed.
 362   NoSafepointVerifier no_safepoint;
 363 


 374   // Now that the vframeArray has been created if we have any deferred local writes
 375   // added by jvmti then we can free up that structure as the data is now in the
 376   // vframeArray
 377 
 378   if (thread->deferred_locals() != NULL) {
 379     GrowableArray<jvmtiDeferredLocalVariableSet*>* list = thread->deferred_locals();
 380     int i = 0;
 381     do {
 382       // Because of inlining we could have multiple vframes for a single frame
 383       // and several of the vframes could have deferred writes. Find them all.
 384       if (list->at(i)->id() == array->original().id()) {
 385         jvmtiDeferredLocalVariableSet* dlv = list->at(i);
 386         list->remove_at(i);
 387         // individual jvmtiDeferredLocalVariableSet are CHeapObj's
 388         delete dlv;
 389       } else {
 390         i++;
 391       }
 392     } while ( i < list->length() );
 393     if (list->length() == 0) {
 394       JvmtiDeferredUpdates* updates = thread->deferred_updates();
 395       thread->reset_deferred_updates();
 396       // free deferred updates.
 397       delete updates;
 398     }
 399 
 400   }
 401 
 402   // Compute the caller frame based on the sender sp of stub_frame and stored frame sizes info.
 403   CodeBlob* cb = stub_frame.cb();
 404   // Verify we have the right vframeArray
 405   assert(cb->frame_size() >= 0, "Unexpected frame size");
 406   intptr_t* unpack_sp = stub_frame.sp() + cb->frame_size();
 407 
 408   // If the deopt call site is a MethodHandle invoke call site we have
 409   // to adjust the unpack_sp.
 410   nmethod* deoptee_nm = deoptee.cb()->as_nmethod_or_null();
 411   if (deoptee_nm != NULL && deoptee_nm->is_method_handle_return(deoptee.pc()))
 412     unpack_sp = deoptee.unextended_sp();
 413 
 414 #ifdef ASSERT
 415   assert(cb->is_deoptimization_stub() ||
 416          cb->is_uncommon_trap_stub() ||
 417          strcmp("Stub<DeoptimizationStub.deoptimizationHandler>", cb->name()) == 0 ||


 937         delete s;
 938       }
 939     }
 940     return _singleton;
 941   }
 942   oop lookup_raw(intptr_t raw_value) {
 943     // Have to cast to avoid little/big-endian problems.
 944     jboolean value = (jboolean)*((jint*)&raw_value);
 945     return lookup(value);
 946   }
 947   oop lookup(jboolean value) {
 948     if (value != 0) {
 949       return JNIHandles::resolve_non_null(_true_cache);
 950     }
 951     return JNIHandles::resolve_non_null(_false_cache);
 952   }
 953 };
 954 
 955 BooleanBoxCache* BooleanBoxCache::_singleton = NULL;
 956 
// Return the canonical cached box object (e.g. from the Integer.valueOf cache)
// for the scalar-replaced autobox described by 'bv', or NULL if the klass is
// not a primitive box type or the box type has no cache handling here.
oop Deoptimization::get_cached_box(AutoBoxObjectValue* bv, frame* fr, const RegisterMap* reg_map, TRAPS) {
   Klass* k = java_lang_Class::as_Klass(bv->klass()->as_ConstantOopReadValue()->value()());
   // T_OBJECT is returned for klasses that are not primitive box types.
   BasicType box_type = SystemDictionary::box_klass_type(k);
   if (box_type != T_OBJECT) {
     // Field index 1 is used for T_LONG — assumes the value word of a long box
     // lives in the second debug-info slot; TODO confirm against scope encoding.
     StackValue* value = StackValue::create_stack_value(fr, reg_map, bv->field_at(box_type == T_LONG ? 1 : 0));
     switch(box_type) {
       case T_INT:     return IntegerBoxCache::singleton(THREAD)->lookup_raw(value->get_int());
       case T_CHAR:    return CharacterBoxCache::singleton(THREAD)->lookup_raw(value->get_int());
       case T_SHORT:   return ShortBoxCache::singleton(THREAD)->lookup_raw(value->get_int());
       case T_BYTE:    return ByteBoxCache::singleton(THREAD)->lookup_raw(value->get_int());
       case T_BOOLEAN: return BooleanBoxCache::singleton(THREAD)->lookup_raw(value->get_int());
       case T_LONG:    return LongBoxCache::singleton(THREAD)->lookup_raw(value->get_int());
       default:;       // no cache for this type — fall through to NULL
     }
   }
   return NULL;
}
 974 #endif // INCLUDE_JVMCI || INCLUDE_AOT
 975 
 976 #if COMPILER2_OR_JVMCI
 977 bool Deoptimization::realloc_objects(JavaThread* thread, frame* fr, const RegisterMap* reg_map, GrowableArray<ScopeValue*>* objects, TRAPS) {
 978   Handle pending_exception(THREAD, thread->pending_exception());
 979   const char* exception_file = thread->exception_file();
 980   int exception_line = thread->exception_line();
 981   thread->clear_pending_exception();
 982 
 983   bool failures = false;
 984 
 985   for (int i = 0; i < objects->length(); i++) {
 986     assert(objects->at(i)->is_object(), "invalid debug information");
 987     ObjectValue* sv = (ObjectValue*) objects->at(i);
 988 
 989     Klass* k = java_lang_Class::as_Klass(sv->klass()->as_ConstantOopReadValue()->value()());
 990     oop obj = NULL;
 991 
 992     if (k->is_instance_klass()) {
 993 #if INCLUDE_JVMCI || INCLUDE_AOT
 994       CompiledMethod* cm = fr->cb()->as_compiled_method_or_null();
 995       if (cm->is_compiled_by_jvmci() && sv->is_auto_box()) {
 996         AutoBoxObjectValue* abv = (AutoBoxObjectValue*) sv;
 997         obj = get_cached_box(abv, fr, reg_map, THREAD);


1018     if (obj == NULL) {
1019       failures = true;
1020     }
1021 
1022     assert(sv->value().is_null(), "redundant reallocation");
1023     assert(obj != NULL || HAS_PENDING_EXCEPTION, "allocation should succeed or we should get an exception");
1024     CLEAR_PENDING_EXCEPTION;
1025     sv->set_value(obj);
1026   }
1027 
1028   if (failures) {
1029     THROW_OOP_(Universe::out_of_memory_error_realloc_objects(), failures);
1030   } else if (pending_exception.not_null()) {
1031     thread->set_pending_exception(pending_exception(), exception_file, exception_line);
1032   }
1033 
1034   return failures;
1035 }
1036 
1037 // restore elements of an eliminated type array
1038 void Deoptimization::reassign_type_array_elements(frame* fr, const RegisterMap* reg_map, ObjectValue* sv, typeArrayOop obj, BasicType type) {
1039   int index = 0;
1040   intptr_t val;
1041 
1042   for (int i = 0; i < sv->field_size(); i++) {
1043     StackValue* value = StackValue::create_stack_value(fr, reg_map, sv->field_at(i));
1044     switch(type) {
1045     case T_LONG: case T_DOUBLE: {
1046       assert(value->type() == T_INT, "Agreement.");
1047       StackValue* low =
1048         StackValue::create_stack_value(fr, reg_map, sv->field_at(++i));
1049 #ifdef _LP64
1050       jlong res = (jlong)low->get_int();
1051 #else
1052 #ifdef SPARC
1053       // For SPARC we have to swap high and low words.
1054       jlong res = jlong_from((jint)low->get_int(), (jint)value->get_int());
1055 #else
1056       jlong res = jlong_from((jint)value->get_int(), (jint)low->get_int());
1057 #endif //SPARC
1058 #endif


1115       assert(value->type() == T_INT, "Agreement.");
1116       val = value->get_int();
1117       obj->byte_at_put(index, (jbyte)*((jint*)&val));
1118       break;
1119 
1120     case T_BOOLEAN:
1121       assert(value->type() == T_INT, "Agreement.");
1122       val = value->get_int();
1123       obj->bool_at_put(index, (jboolean)*((jint*)&val));
1124       break;
1125 
1126       default:
1127         ShouldNotReachHere();
1128     }
1129     index++;
1130   }
1131 }
1132 
1133 
1134 // restore fields of an eliminated object array
1135 void Deoptimization::reassign_object_array_elements(frame* fr, const RegisterMap* reg_map, ObjectValue* sv, objArrayOop obj) {
1136   for (int i = 0; i < sv->field_size(); i++) {
1137     StackValue* value = StackValue::create_stack_value(fr, reg_map, sv->field_at(i));
1138     assert(value->type() == T_OBJECT, "object element expected");
1139     obj->obj_at_put(i, value->get_obj()());
1140   }
1141 }
1142 
1143 class ReassignedField {
1144 public:
1145   int _offset;
1146   BasicType _type;
1147 public:
1148   ReassignedField() {
1149     _offset = 0;
1150     _type = T_ILLEGAL;
1151   }
1152 };
1153 
1154 int compare(ReassignedField* left, ReassignedField* right) {
1155   return left->_offset - right->_offset;
1156 }
1157 
1158 // Restore fields of an eliminated instance object using the same field order
1159 // returned by HotSpotResolvedObjectTypeImpl.getInstanceFields(true)
1160 static int reassign_fields_by_klass(InstanceKlass* klass, frame* fr, const RegisterMap* reg_map, ObjectValue* sv, int svIndex, oop obj, bool skip_internal) {
1161   if (klass->superklass() != NULL) {
1162     svIndex = reassign_fields_by_klass(klass->superklass(), fr, reg_map, sv, svIndex, obj, skip_internal);
1163   }
1164 
1165   GrowableArray<ReassignedField>* fields = new GrowableArray<ReassignedField>();
1166   for (AllFieldStream fs(klass); !fs.done(); fs.next()) {
1167     if (!fs.access_flags().is_static() && (!skip_internal || !fs.access_flags().is_internal())) {
1168       ReassignedField field;
1169       field._offset = fs.offset();
1170       field._type = FieldType::basic_type(fs.signature());
1171       fields->append(field);
1172     }
1173   }
1174   fields->sort(compare);
1175   for (int i = 0; i < fields->length(); i++) {
1176     intptr_t val;
1177     ScopeValue* scope_field = sv->field_at(svIndex);
1178     StackValue* value = StackValue::create_stack_value(fr, reg_map, scope_field);
1179     int offset = fields->at(i)._offset;
1180     BasicType type = fields->at(i)._type;


1248         assert(value->type() == T_INT, "Agreement.");
1249         val = value->get_int();
1250         obj->byte_field_put(offset, (jbyte)*((jint*)&val));
1251         break;
1252 
1253       case T_BOOLEAN:
1254         assert(value->type() == T_INT, "Agreement.");
1255         val = value->get_int();
1256         obj->bool_field_put(offset, (jboolean)*((jint*)&val));
1257         break;
1258 
1259       default:
1260         ShouldNotReachHere();
1261     }
1262     svIndex++;
1263   }
1264   return svIndex;
1265 }
1266 
// restore fields of all eliminated objects and arrays
//
// For every scalar-replaced object described in the debug info (previously
// reallocated by realloc_objects, which stored the new oop in the
// ObjectValue) copy the recorded field/element values back into the heap
// object. Objects whose reallocation failed (null handle) are skipped;
// callers tolerate that via the realloc_failures flag.
void Deoptimization::reassign_fields(frame* fr, const RegisterMap* reg_map, GrowableArray<ScopeValue*>* objects, bool realloc_failures, bool skip_internal) {
  for (int i = 0; i < objects->length(); i++) {
    ObjectValue* sv = (ObjectValue*) objects->at(i);
    Klass* k = java_lang_Class::as_Klass(sv->klass()->as_ConstantOopReadValue()->value()());
    Handle obj = sv->value();  // set by realloc_objects; null only on allocation failure
    assert(obj.not_null() || realloc_failures, "reallocation was missed");
    if (PrintDeoptimizationDetails) {
      tty->print_cr("reassign fields for object of type %s!", k->name()->as_C_string());
    }
    if (obj.is_null()) {
      continue;
    }
#if INCLUDE_JVMCI || INCLUDE_AOT
    // Don't reassign fields of boxes that came from a cache. Caches may be in CDS.
    if (sv->is_auto_box() && ((AutoBoxObjectValue*) sv)->is_cached()) {
      continue;
    }
#endif // INCLUDE_JVMCI || INCLUDE_AOT
    // Dispatch on the klass kind; the three cases are mutually exclusive.
    if (k->is_instance_klass()) {
      InstanceKlass* ik = InstanceKlass::cast(k);
      reassign_fields_by_klass(ik, fr, reg_map, sv, 0, obj(), skip_internal);
    } else if (k->is_typeArray_klass()) {
      TypeArrayKlass* ak = TypeArrayKlass::cast(k);
      reassign_type_array_elements(fr, reg_map, sv, (typeArrayOop) obj(), ak->element_type());
    } else if (k->is_objArray_klass()) {
      reassign_object_array_elements(fr, reg_map, sv, (objArrayOop) obj());
    }
  }
}
1297 
1298 
// relock objects for which synchronization was eliminated
//
// Re-acquires every monitor that the compiler eliminated for the frame being
// deoptimized. Monitors whose owner was scalar replaced and could not be
// reallocated are skipped (tolerated via realloc_failures). Returns true iff
// at least one eliminated monitor was processed.
bool Deoptimization::relock_objects(JavaThread* thread, GrowableArray<MonitorInfo*>* monitors,
                                    JavaThread* deoptee_thread, frame* fr, int exec_mode, bool realloc_failures) {
  bool relocked_objects = false;
  for (int i = 0; i < monitors->length(); i++) {
    MonitorInfo* mon_info = monitors->at(i);
    if (mon_info->eliminated()) {
      assert(!mon_info->owner_is_scalar_replaced() || realloc_failures, "reallocation was missed");
      relocked_objects = true;
      if (!mon_info->owner_is_scalar_replaced()) {
        Handle obj(thread, mon_info->owner());
        markWord mark = obj->mark();
        if (UseBiasedLocking && mark.has_bias_pattern()) {
          // Newly allocated objects may have the mark set to anonymously biased.
          // Also the deoptimized method may have called methods with synchronization
          // where the thread-local object is bias locked to the current thread.
          assert(mark.is_biased_anonymously() ||
                 mark.biased_locker() == deoptee_thread, "should be locked to current thread");
          // Reset mark word to unbiased prototype (preserving the object's age)
          // so the ObjectSynchronizer::enter below starts from a clean state.
          markWord unbiased_prototype = markWord::prototype().set_age(mark.age());
          obj->set_mark(unbiased_prototype);
        } else if (exec_mode == Unpack_none) {
          if (mark.has_locker() && fr->sp() > (intptr_t*)mark.locker()) {
            // With exec_mode == Unpack_none obj may be thread local and locked in
            // a callee frame. In this case the bias was revoked before.
            // Make the lock in the callee a recursive lock and restore the displaced header.
            markWord dmw = mark.displaced_mark_helper();
            mark.locker()->set_displaced_header(markWord::encode((BasicLock*) NULL));
            obj->set_mark(dmw);
          }
          if (mark.has_monitor()) {
            // defer relocking if the deoptee thread is currently waiting for obj;
            // the relock is accounted for via inc_relock_count_after_wait() and
            // the lock slot is marked with the unused mark instead.
            ObjectMonitor* waiting_monitor = deoptee_thread->current_waiting_monitor();
            if (waiting_monitor != NULL && (oop)waiting_monitor->object() == obj()) {
              mon_info->lock()->set_displaced_header(markWord::unused_mark());
              deoptee_thread->inc_relock_count_after_wait();
              continue;
            }
          }
        }
        // Re-enter the monitor on behalf of the deoptee thread.
        BasicLock* lock = mon_info->lock();
        ObjectSynchronizer::enter(obj, lock, deoptee_thread);
        assert(mon_info->owner()->is_locked(), "object must be locked now");
      }
    }
  }
  return relocked_objects;
}
1347 
1348 
1349 #ifndef PRODUCT
1350 // print information about reallocated objects
1351 void Deoptimization::print_objects(GrowableArray<ScopeValue*>* objects, bool realloc_failures) {
1352   fieldDescriptor fd;
1353 
1354   for (int i = 0; i < objects->length(); i++) {
1355     ObjectValue* sv = (ObjectValue*) objects->at(i);
1356     Klass* k = java_lang_Class::as_Klass(sv->klass()->as_ConstantOopReadValue()->value()());
1357     Handle obj = sv->value();
1358 
1359     tty->print("     object <" INTPTR_FORMAT "> of type ", p2i(sv->value()()));
1360     k->print_value();
1361     assert(obj.not_null() || realloc_failures, "reallocation was missed");
1362     if (obj.is_null()) {
1363       tty->print(" allocation failed");
1364     } else {
1365       tty->print(" allocated (%d bytes)", obj->size() * HeapWordSize);


2504             if (bc_case == BC_CASE_LIMIT && (int)bc == 0)
2505               bc = Bytecodes::_illegal;
2506             sprintf(name, "%s/%s/%s",
2507                     trap_reason_name(reason),
2508                     trap_action_name(action),
2509                     Bytecodes::is_defined(bc)? Bytecodes::name(bc): "other");
2510             juint r = counter >> LSB_BITS;
2511             tty->print_cr("  %40s: " UINT32_FORMAT " (%.1f%%)", name, r, (r * 100.0) / total);
2512             account -= r;
2513           }
2514         }
2515       }
2516     }
2517     if (account != 0) {
2518       PRINT_STAT_LINE("unaccounted", account);
2519     }
2520     #undef PRINT_STAT_LINE
2521     if (xtty != NULL)  xtty->tail("statistics");
2522   }
2523 }
2524 
2525 #ifdef ASSERT
2526 // Revert optimizations based on escape analysis for all compiled frames of all Java threads as if
2527 // objects local to a frame or a thread were escaping. Do it every DeoptimizeObjectsALotInterval
2528 // milliseconds.
void Deoptimization::deoptimize_objects_alot_loop() {
  JavaThread* ct = JavaThread::current();
  HandleMark hm(ct);
  // Loop until this (daemon) thread is terminated.
  while (!ct->is_terminated()) {
    { // Begin new scope for escape barrier
      HandleMarkCleaner hmc(ct);
      ResourceMark rm(ct);
      // Second ctor arg 'true' presumably selects the all-threads barrier
      // (matches the deoptimize_objects_all_threads() call) — TODO confirm.
      JVMTIEscapeBarrier eb(ct, true);
      eb.deoptimize_objects_all_threads();
    }
    // Now sleep after the escape barriers destructor resumed the java threads.
    os::sleep(ct, DeoptimizeObjectsALotInterval, true);
  }
}
#endif // ASSERT
2544 
2545 // Returns true iff objects were reallocated and relocked because of access through JVMTI
2546 bool JVMTIEscapeBarrier::objs_are_deoptimized(JavaThread* thread, intptr_t* fr_id) {
2547   // first/oldest update holds the flag
2548   GrowableArray<jvmtiDeferredLocalVariableSet*>* list = thread->deferred_locals();
2549   bool result = false;
2550   if (list != NULL ) {
2551     for (int i = 0; i < list->length(); i++) {
2552       if (list->at(i)->matches(fr_id)) {
2553         result = list->at(i)->objects_are_deoptimized();
2554         break;
2555       }
2556     }
2557   }
2558   return result;
2559 }
2560 
2561 // Remember that objects were reallocated and relocked for the compiled frame with the given id
2562 void JVMTIEscapeBarrier::set_objs_are_deoptimized(JavaThread* thread, intptr_t* fr_id) {
2563   // set in first/oldest update
2564   GrowableArray<jvmtiDeferredLocalVariableSet*>* list = thread->deferred_locals();
2565   DEBUG_ONLY(bool found = false);
2566   if (list != NULL ) {
2567     for (int i = 0; i < list->length(); i++) {
2568       if (list->at(i)->matches(fr_id)) {
2569         DEBUG_ONLY(found = true);
2570         list->at(i)->set_objs_are_deoptimized();
2571         break;
2572       }
2573     }
2574   }
2575   assert(found, "variable set should exist at least for one vframe");
2576 }
2577 
2578 bool JVMTIEscapeBarrier::deoptimize_objects(compiledVFrame* cvf) {
2579   return !barrier_active() || deoptimize_objects(deoptee_thread(), cvf->fr(), cvf->register_map());
2580 }
2581 
// Deoptimize frames with non escaping objects. Deoptimize objects with optimizations based on
// escape analysis. Do it for all frames within the given depth and continue from there until the
// entry frame is reached, because thread local objects passed as arguments might escape from callee
// frames within the given depth.
// Returns false iff a reallocation of a scalar replaced object failed.
bool JVMTIEscapeBarrier::deoptimize_objects(int depth) {
  if (barrier_active() && deoptee_thread()->has_last_Java_frame()) {
    ResourceMark rm;
    HandleMark   hm;
    RegisterMap  reg_map(deoptee_thread());
    vframe* vf = deoptee_thread()->last_java_vframe(&reg_map);
    int cur_depth = 0;
    // Walk past 'depth' only while we have not yet reached an entry frame.
    while (vf != NULL && ((cur_depth <= depth) || !vf->is_entry_frame())) {
      if (vf->is_compiled_frame()) {
        compiledVFrame* cvf = compiledVFrame::cast(vf);
        // Deoptimize frame and local objects if any exist.
        // If cvf is deeper than depth, then we must only deoptimize if local objects are passed as args.
        bool should_deopt = cur_depth <= depth ? cvf->not_global_escape_in_scope() : cvf->arg_escape();
        if (should_deopt && !deoptimize_objects(cvf)) {
          // reallocation of scalar replaced objects failed, because heap is exhausted
          return false;
        }
      }

      // move to next physical frame (skip the remaining inlined vframes of the
      // current physical frame, counting each towards cur_depth)
      while(!vf->is_top()) {
        cur_depth++;
        vf = vf->sender();
      }
      cur_depth++;
      vf = vf->sender();
    }
  }
  return true;
}
2616 
2617 bool JVMTIEscapeBarrier::deoptimize_objects(intptr_t* fr_id) {
2618   if (!barrier_active()) return true;
2619   // Compute frame and register map based on thread and sp.
2620   RegisterMap reg_map(deoptee_thread());
2621   frame fr = deoptee_thread()->last_frame();
2622   while (fr.id() != fr_id) {
2623     fr = fr.sender(&reg_map);
2624   }
2625   return deoptimize_objects(deoptee_thread(), fr, &reg_map);
2626 }
2627 
2628 
// Deoptimize objects of all compiled frames of all Java threads whose scopes
// contain objects that did not globally escape (or escape only as arguments).
// Returns false iff a reallocation of a scalar replaced object failed.
bool JVMTIEscapeBarrier::deoptimize_objects_all_threads() {
  if (!barrier_active()) return true;
  ResourceMark rm(calling_thread());
  for (JavaThreadIteratorWithHandle jtiwh; JavaThread *jt = jtiwh.next(); ) {
    if (jt->has_last_Java_frame()) {
      RegisterMap reg_map(jt);
      vframe* vf = jt->last_java_vframe(&reg_map);
      assert(jt->frame_anchor()->walkable(),
             "The stack of JavaThread " PTR_FORMAT " is not walkable. Thread state is %d",
             p2i(jt), jt->thread_state());
      while (vf != NULL) {
        if (vf->is_compiled_frame()) {
          compiledVFrame* cvf = compiledVFrame::cast(vf);
          if ((cvf->not_global_escape_in_scope() || cvf->arg_escape()) &&
              !deoptimize_objects(jt, cvf->fr(), cvf->register_map())) {
            return false; // reallocation failure
          }
        }
        // move to next physical frame (skip inlined vframes of the current one)
        while(!vf->is_top()) {
          vf = vf->sender();
        }
        vf = vf->sender();
      }
    }
  }
  return true; // success
}
2657 
// True while a global (all-threads) object deoptimization is in progress; read/written under the Threads_lock.
bool JVMTIEscapeBarrier::_deoptimizing_objects_for_all_threads = false;
// Serializes threads deoptimizing their own objects; protected by JvmtiObjReallocRelock_lock.
bool JVMTIEscapeBarrier::_self_deoptimization_in_progress      = false;
2660 
// Reports whether an all-threads object deoptimization is in progress.
// Caller must hold the Threads_lock, which also guards updates to the flag.
bool JVMTIEscapeBarrier::deoptimizing_objects_for_all_threads() {
  assert(Threads_lock->owned_by_self(), "Threads_lock required");
  return _deoptimizing_objects_for_all_threads;
}
2665 
2666 void JVMTIEscapeBarrier::set_deoptimizing_objects_for_all_threads(bool v) {
2667   assert(Threads_lock->owned_by_self(), "Threads_lock required");
2668   _deoptimizing_objects_for_all_threads = v;
2669   if (!_deoptimizing_objects_for_all_threads) {
2670     Threads_lock->notify_all(); // notify waiting threads
2671   }
2672 }
2673 
// Suspend the single deoptee thread so its objects can be deoptimized safely.
// Coordinates with concurrent deoptimizations via JvmtiObjReallocRelock_lock
// and the per-thread ea_obj_deopt flag; for the self-deopt case no suspension
// is needed and the method returns after claiming _self_deoptimization_in_progress.
void JVMTIEscapeBarrier::sync_and_suspend_one() {
  assert(_calling_thread != NULL, "calling thread must not be NULL");
  assert(_deoptee_thread != NULL, "deoptee thread must not be NULL");
  assert(barrier_active(), "should not call");

  // Sync with other threads that might be doing deoptimizations
  {
    MutexLocker ml(JvmtiObjReallocRelock_lock);
    // Wait until no thread is deoptimizing its own objects.
    while (_self_deoptimization_in_progress) {
      JvmtiObjReallocRelock_lock->wait();
    }

    if (self_deopt()) {
      _self_deoptimization_in_progress = true;
    }

    // Wait until the deoptee is no longer suspended by another barrier.
    while (_deoptee_thread->is_ea_obj_deopt_suspend()) {
      JvmtiObjReallocRelock_lock->wait();
    }

    if (self_deopt()) {
      // Deoptimizing our own objects: no suspension required.
      return;
    }

    // set suspend flag for target thread
    _deoptee_thread->set_ea_obj_deopt_flag();
  }

  // suspend target thread
  uint32_t debug_bits = 0;
  if (!_deoptee_thread->is_thread_fully_suspended(false, &debug_bits)) {
    // A no-op handshake forces the target to a safe state with a walkable stack.
    class NopClosure : public ThreadClosure {
      void do_thread(Thread* th) { }
    } nop;
    Handshake::execute(&nop, _deoptee_thread);
  }
  assert(!_deoptee_thread->has_last_Java_frame() || _deoptee_thread->frame_anchor()->walkable(),
         "stack should be walkable now");
}
2713 
// VM operation that sets the ea_obj_deopt suspend flag on all Java threads
// (except the requester and threads hidden from external view) and publishes
// the global deoptimizing-objects-for-all-threads state.
class VM_ThreadSuspendAllForObjDeopt : public VM_Operation {
  public:
   VMOp_Type type() const { return VMOp_ThreadSuspendAllForObjDeopt; }
   virtual void doit() {
     Thread* ct = calling_thread();
     for (JavaThreadIteratorWithHandle jtiwh; JavaThread *jt = jtiwh.next(); ) {
       if (jt->is_hidden_from_external_view()) continue;
       assert(!jt->is_ea_obj_deopt_suspend(), "bad synchronization");
       // The requesting thread must not suspend itself.
       if (ct != jt) {
         jt->set_ea_obj_deopt_flag();
       }
     }
     JVMTIEscapeBarrier::set_deoptimizing_objects_for_all_threads(true);
   }
};
2729 
// Suspend all Java threads (via VM_ThreadSuspendAllForObjDeopt) so their
// objects can be deoptimized. First waits until no thread is deoptimizing its
// own objects and no thread is still suspended by an earlier barrier.
void JVMTIEscapeBarrier::sync_and_suspend_all() {
  assert(barrier_active(), "should not call");
  assert(_calling_thread != NULL, "calling thread must not be NULL");
  assert(all_threads(), "sanity");

  // Sync with other threads that might be doing deoptimizations
  {
    MutexLocker ml(JvmtiObjReallocRelock_lock);
    while (_self_deoptimization_in_progress) {
      JvmtiObjReallocRelock_lock->wait();
    }

    _self_deoptimization_in_progress = true;

    // Wait until no thread is left suspended by a single-thread barrier.
    bool deopt_in_progress;
    do {
      deopt_in_progress = false;
      for (JavaThreadIteratorWithHandle jtiwh; JavaThread *jt = jtiwh.next(); ) {
        if (jt->is_ea_obj_deopt_suspend()) {
          deopt_in_progress = true;
          JvmtiObjReallocRelock_lock->wait();
          break; // check all threads again
        }
      }
    } while(deopt_in_progress);
  }

  VM_ThreadSuspendAllForObjDeopt vm_suspend_all;
  VMThread::execute(&vm_suspend_all);
#ifdef ASSERT
  // After the VM operation every visible thread must have a walkable stack.
  for (JavaThreadIteratorWithHandle jtiwh; JavaThread *jt = jtiwh.next(); ) {
    if (jt->is_hidden_from_external_view()) continue;
    assert(!jt->has_last_Java_frame() || jt->frame_anchor()->walkable(),
           "The stack of JavaThread " PTR_FORMAT " is not walkable. Thread state is %d",
           p2i(jt), jt->thread_state());
  }
#endif // ASSERT
}
2768 
// Resume the single thread for which objects were deoptimized and wake up
// threads waiting to start their own object deoptimization.
void JVMTIEscapeBarrier::resume_one() {
  assert(barrier_active(), "should not call");
  assert(!all_threads(), "use resume_all()");
  MutexLocker ml(JvmtiObjReallocRelock_lock);
  if (self_deopt()) {
    // The deoptee deoptimized objects in its own frames: it was never
    // suspended, so only the in-progress flag needs to be cleared.
    assert(_self_deoptimization_in_progress, "incorrect synchronization");
    _self_deoptimization_in_progress = false;
  } else {
    // A different thread was targeted: clear its suspension flag so it can run again.
    _deoptee_thread->clear_ea_obj_deopt_flag();
  }
  // Wake threads blocked in sync_and_suspend_all() waiting for this deoptimization.
  JvmtiObjReallocRelock_lock->notify_all();
}
2781 
// Resume all threads that were suspended by sync_and_suspend_all() and wake up
// threads waiting to start their own object deoptimization.
void JVMTIEscapeBarrier::resume_all() {
  assert(barrier_active(), "should not call");
  assert(all_threads(), "use resume_one()");
  {
    // Taken to keep the set of threads stable while the global
    // deoptimizing-objects state is cleared.
    MutexLocker l1(Threads_lock);
    set_deoptimizing_objects_for_all_threads(false);
  }
  MutexLocker l2(JvmtiObjReallocRelock_lock);
  assert(_self_deoptimization_in_progress, "incorrect synchronization");
  _self_deoptimization_in_progress = false;
  // Clear the suspension flag of every thread, letting them run again.
  for (JavaThreadIteratorWithHandle jtiwh; JavaThread *jt = jtiwh.next(); ) {
    jt->clear_ea_obj_deopt_flag();
  }
  // Wake threads blocked in sync_and_suspend_all().
  JvmtiObjReallocRelock_lock->notify_all();
}
2797 
// Deoptimize the given frame and deoptimize objects with optimizations based on escape analysis,
// i.e. reallocate scalar replaced objects on the heap and relock objects if locking has been
// eliminated.
// Deoptimized objects are kept as JVMTI deferred updates until the compiled frame is replaced
// with interpreter frames.
// Returns false iff at least one reallocation failed.
bool JVMTIEscapeBarrier::deoptimize_objects(JavaThread* deoptee, frame fr, const RegisterMap *reg_map) {
  // Without an active barrier there is nothing to do; report success.
  if (!barrier_active()) return true;

  JavaThread* ct = calling_thread();
  bool realloc_failures = false;

  assert(fr.is_compiled_frame(), "only compiled frames can contain stack allocated objects");
  assert(reg_map->update_map(), "e.g. for values in callee saved registers");

  // Skip the work if the objects of this frame have already been deoptimized.
  if (!objs_are_deoptimized(deoptee, fr.id())) {
    // Execution must not continue in the compiled method, so we deoptimize the frame.
    // As a side effect all locking biases will be removed which makes relocking
    // of eliminated nested locks easier.
    compiledVFrame* last_cvf = compiledVFrame::cast(vframe::new_vframe(&fr, reg_map, deoptee));
    if (!fr.is_deoptimized_frame()) {
      Deoptimization::deoptimize_frame(deoptee, fr.id());

      // the frame fr is stale after the deoptimization, we have to fetch it again
      StackFrameStream fst(deoptee);
      while (fst.current()->id() != fr.id() && !fst.is_done()) {
        fst.next();
      }
      assert(fst.current()->id() == fr.id(), "frame not found after deoptimization");
      last_cvf = compiledVFrame::cast(vframe::new_vframe(fst.current(), fst.register_map(), deoptee));
    }

    // collect inlined frames
    compiledVFrame* cvf = last_cvf;
    GrowableArray<compiledVFrame*>* vfs = new GrowableArray<compiledVFrame*>(10);
    while (!cvf->is_top()) {
      vfs->push(cvf);
      cvf = compiledVFrame::cast(cvf->sender());
    }
    vfs->push(cvf);

    // With the exception of non-escaping owners, biases were revoked when the deoptimization
    // of fr was requested. Among the non-escaping owners of eliminated locks there might be
    // some that are still locked in callee frames. We need their biases revoked and do it here,
    // because we cannot safepoint in relock_objects(). Note that the markword of such an owner
    // will then point to a callee frame. This will be fixed in relock_objects().
    if (UseBiasedLocking && last_cvf->arg_escape()) {
      GrowableArray<Handle>* arg_esc_owners = new GrowableArray<Handle>();
      for (int i = 0; i < vfs->length(); i++) {
        GrowableArray<MonitorInfo*>* monitors = vfs->at(i)->monitors();
        for (int j = 0; j < monitors->length(); j++) {
          MonitorInfo* mon_info = monitors->at(j);
          // Scalar replaced owners are not on the heap yet; treat them as absent here.
          oop owner = mon_info->owner_is_scalar_replaced() ? oop(NULL) : mon_info->owner();
          if (mon_info->eliminated() && owner != NULL) {
            markWord mark = owner->mark();
            if (mark.has_bias_pattern() && !mark.is_biased_anonymously()) {
              assert(mark.biased_locker() == deoptee, "not escaping object can only be biased to current thread");
              arg_esc_owners->push(Handle(ct, owner));
            }
          }
        }
      }
      if (arg_esc_owners->length() > 0) {
        BiasedLocking::revoke(arg_esc_owners, deoptee);
      }
    }

    // reallocate and relock optimized objects
    bool deoptimized_objects = Deoptimization::deoptimize_objects(ct, vfs, realloc_failures, Deoptimization::Unpack_none);
    if (!realloc_failures && deoptimized_objects) {
      // now do the updates: write the reallocated objects back into the
      // locals, expression stack slots and monitors of each collected frame
      for (int frame_index = 0; frame_index < vfs->length(); frame_index++) {
        cvf = vfs->at(frame_index);

        // locals
        GrowableArray<ScopeValue*>* scopeLocals = cvf->scope()->locals();
        StackValueCollection* locals = cvf->locals();
        if (locals != NULL) {
          for (int i2 = 0; i2 < locals->size(); i2++) {
            StackValue* var = locals->at(i2);
            if (var->type() == T_OBJECT && scopeLocals->at(i2)->is_object()) {
              jvalue val;
              val.l = (jobject) locals->at(i2)->get_obj()();
              cvf->update_local(T_OBJECT, i2, val);
            }
          }
        }

        // expressions
        GrowableArray<ScopeValue*>* scopeExpressions = cvf->scope()->expressions();
        StackValueCollection* expressions = cvf->expressions();
        if (expressions != NULL) {
          for (int i2 = 0; i2 < expressions->size(); i2++) {
            StackValue* var = expressions->at(i2);
            if (var->type() == T_OBJECT && scopeExpressions->at(i2)->is_object()) {
              jvalue val;
              val.l = (jobject) expressions->at(i2)->get_obj()();
              cvf->update_stack(T_OBJECT, i2, val);
            }
          }
        }

        // monitors
        GrowableArray<MonitorInfo*>* monitors = cvf->monitors();
        if (monitors != NULL) {
          for (int i2 = 0; i2 < monitors->length(); i2++) {
            if (monitors->at(i2)->eliminated()) {
              assert(!monitors->at(i2)->owner_is_scalar_replaced(), "reallocation failure, should not update");
              cvf->update_monitor(i2, monitors->at(i2));
            }
          }
        }
      }
      // Remember that this frame's objects are done, so the work is not repeated.
      set_objs_are_deoptimized(deoptee, fr.id());
    }
  }
  return !realloc_failures;
}
2915 
2916 #else // COMPILER2_OR_JVMCI
2917 
2918 
2919 // Stubs for C1 only system.
2920 bool Deoptimization::trap_state_is_recompiled(int trap_state) {
2921   return false;
2922 }
2923 
2924 const char* Deoptimization::trap_reason_name(int reason) {
2925   return "unknown";
2926 }
2927 
2928 void Deoptimization::print_statistics() {
2929   // no output
2930 }
2931 
2932 void
2933 Deoptimization::update_method_data_from_interpreter(MethodData* trap_mdo, int trap_bci, int reason) {
2934   // no udpate
2935 }
< prev index next >